1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TaskLabel, TextStyle,
33};
34
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::Mutex;
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] can be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
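///
/// # Example
///
/// A minimal sketch, not from the original source, of creating a local buffer
/// and taking a snapshot (assumes a `gpui` app context `cx`):
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
/// let snapshot = buffer.read(cx).snapshot();
/// // Snapshots are cheap to take and can be read on a background thread.
/// assert_eq!(snapshot.text(), "fn main() {}");
/// ```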
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<TreeSitterData>,
134}
135
136#[derive(Debug)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
140}
141
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self, snapshot: text::BufferSnapshot) {
146 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
147 self.brackets_by_chunks.get_mut().clear();
148 self.brackets_by_chunks
149 .get_mut()
150 .resize(self.chunks.len(), None);
151 }
152
153 fn new(snapshot: text::BufferSnapshot) -> Self {
154 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 Self {
156 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
157 chunks,
158 }
159 }
160
161 fn version(&self) -> &clock::Global {
162 self.chunks.version()
163 }
164}
165
166#[derive(Copy, Clone, Debug, PartialEq, Eq)]
167pub enum ParseStatus {
168 Idle,
169 Parsing,
170}
171
172struct BufferBranchState {
173 base_buffer: Entity<Buffer>,
174 merged_operations: Vec<Lamport>,
175}
176
177/// An immutable, cheaply cloneable representation of a fixed
178/// state of a buffer.
179pub struct BufferSnapshot {
180 pub text: text::BufferSnapshot,
181 pub syntax: SyntaxSnapshot,
182 file: Option<Arc<dyn File>>,
183 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
184 remote_selections: TreeMap<ReplicaId, SelectionSet>,
185 language: Option<Arc<Language>>,
186 non_text_state_update_count: usize,
187 tree_sitter_data: Arc<TreeSitterData>,
188}
189
190/// The kind and amount of indentation in a particular line. For now,
191/// assumes that indentation is all the same character.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub struct IndentSize {
194 /// The number of bytes that comprise the indentation.
195 pub len: u32,
196 /// The kind of whitespace used for indentation.
197 pub kind: IndentKind,
198}
199
200/// A whitespace character that's used for indentation.
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub enum IndentKind {
203 /// An ASCII space character.
204 #[default]
205 Space,
206 /// An ASCII tab character.
207 Tab,
208}
209
210/// The shape of a selection cursor.
211#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
212pub enum CursorShape {
213 /// A vertical bar
214 #[default]
215 Bar,
216 /// A block that surrounds the following character
217 Block,
218 /// An underline that runs along the following character
219 Underline,
220 /// A box drawn around the following character
221 Hollow,
222}
223
224impl From<settings::CursorShape> for CursorShape {
225 fn from(shape: settings::CursorShape) -> Self {
226 match shape {
227 settings::CursorShape::Bar => CursorShape::Bar,
228 settings::CursorShape::Block => CursorShape::Block,
229 settings::CursorShape::Underline => CursorShape::Underline,
230 settings::CursorShape::Hollow => CursorShape::Hollow,
231 }
232 }
233}
234
235#[derive(Clone, Debug)]
236struct SelectionSet {
237 line_mode: bool,
238 cursor_shape: CursorShape,
239 selections: Arc<[Selection<Anchor>]>,
240 lamport_timestamp: clock::Lamport,
241}
242
243/// A diagnostic associated with a certain range of a buffer.
244#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
245pub struct Diagnostic {
246 /// The name of the service that produced this diagnostic.
247 pub source: Option<String>,
248 /// The ID provided by the dynamic registration that produced this diagnostic.
249 pub registration_id: Option<SharedString>,
250 /// A machine-readable code that identifies this diagnostic.
251 pub code: Option<NumberOrString>,
252 pub code_description: Option<lsp::Uri>,
253 /// Whether this diagnostic is a hint, warning, or error.
254 pub severity: DiagnosticSeverity,
255 /// The human-readable message associated with this diagnostic.
256 pub message: String,
    /// The human-readable message in Markdown format, if any.
258 pub markdown: Option<String>,
259 /// An id that identifies the group to which this diagnostic belongs.
260 ///
261 /// When a language server produces a diagnostic with
262 /// one or more associated diagnostics, those diagnostics are all
263 /// assigned a single group ID.
264 pub group_id: usize,
265 /// Whether this diagnostic is the primary diagnostic for its group.
266 ///
267 /// In a given group, the primary diagnostic is the top-level diagnostic
268 /// returned by the language server. The non-primary diagnostics are the
269 /// associated diagnostics.
270 pub is_primary: bool,
271 /// Whether this diagnostic is considered to originate from an analysis of
272 /// files on disk, as opposed to any unsaved buffer contents. This is a
273 /// property of a given diagnostic source, and is configured for a given
274 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
275 /// for the language server.
276 pub is_disk_based: bool,
277 /// Whether this diagnostic marks unnecessary code.
278 pub is_unnecessary: bool,
    /// A coarse classification of diagnostic groups based on their source.
280 pub source_kind: DiagnosticSourceKind,
281 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
282 pub data: Option<Value>,
283 /// Whether to underline the corresponding text range in the editor.
284 pub underline: bool,
285}
286
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
288pub enum DiagnosticSourceKind {
289 Pulled,
290 Pushed,
291 Other,
292}
293
294/// An operation used to synchronize this buffer with its other replicas.
295#[derive(Clone, Debug, PartialEq)]
296pub enum Operation {
297 /// A text operation.
298 Buffer(text::Operation),
299
300 /// An update to the buffer's diagnostics.
301 UpdateDiagnostics {
302 /// The id of the language server that produced the new diagnostics.
303 server_id: LanguageServerId,
304 /// The diagnostics.
305 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
306 /// The buffer's lamport timestamp.
307 lamport_timestamp: clock::Lamport,
308 },
309
310 /// An update to the most recent selections in this buffer.
311 UpdateSelections {
312 /// The selections.
313 selections: Arc<[Selection<Anchor>]>,
314 /// The buffer's lamport timestamp.
315 lamport_timestamp: clock::Lamport,
316 /// Whether the selections are in 'line mode'.
317 line_mode: bool,
318 /// The [`CursorShape`] associated with these selections.
319 cursor_shape: CursorShape,
320 },
321
322 /// An update to the characters that should trigger autocompletion
323 /// for this buffer.
324 UpdateCompletionTriggers {
325 /// The characters that trigger autocompletion.
326 triggers: Vec<String>,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 /// The language server ID.
330 server_id: LanguageServerId,
331 },
332
333 /// An update to the line ending type of this buffer.
334 UpdateLineEnding {
335 /// The line ending type.
336 line_ending: LineEnding,
337 /// The buffer's lamport timestamp.
338 lamport_timestamp: clock::Lamport,
339 },
340}
341
342/// An event that occurs in a buffer.
343#[derive(Clone, Debug, PartialEq)]
344pub enum BufferEvent {
345 /// The buffer was changed in a way that must be
346 /// propagated to its other replicas.
347 Operation {
348 operation: Operation,
349 is_local: bool,
350 },
351 /// The buffer was edited.
352 Edited,
353 /// The buffer's `dirty` bit changed.
354 DirtyChanged,
355 /// The buffer was saved.
356 Saved,
357 /// The buffer's file was changed on disk.
358 FileHandleChanged,
359 /// The buffer was reloaded.
360 Reloaded,
361 /// The buffer is in need of a reload
362 ReloadNeeded,
363 /// The buffer's language was changed.
    /// The boolean indicates whether the buffer previously had no language
    /// (or only plain text) and now has one.
365 LanguageChanged(bool),
366 /// The buffer's syntax trees were updated.
367 Reparsed,
368 /// The buffer's diagnostics were updated.
369 DiagnosticsUpdated,
370 /// The buffer gained or lost editing capabilities.
371 CapabilityChanged,
372}
373
374/// The file associated with a buffer.
375pub trait File: Send + Sync + Any {
376 /// Returns the [`LocalFile`] associated with this file, if the
377 /// file is local.
378 fn as_local(&self) -> Option<&dyn LocalFile>;
379
380 /// Returns whether this file is local.
381 fn is_local(&self) -> bool {
382 self.as_local().is_some()
383 }
384
385 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
386 /// only available in some states, such as modification time.
387 fn disk_state(&self) -> DiskState;
388
389 /// Returns the path of this file relative to the worktree's root directory.
390 fn path(&self) -> &Arc<RelPath>;
391
392 /// Returns the path of this file relative to the worktree's parent directory (this means it
393 /// includes the name of the worktree's root folder).
394 fn full_path(&self, cx: &App) -> PathBuf;
395
396 /// Returns the path style of this file.
397 fn path_style(&self, cx: &App) -> PathStyle;
398
399 /// Returns the last component of this handle's absolute path. If this handle refers to the root
400 /// of its worktree, then this method will return the name of the worktree itself.
401 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
402
403 /// Returns the id of the worktree to which this file belongs.
404 ///
405 /// This is needed for looking up project-specific settings.
406 fn worktree_id(&self, cx: &App) -> WorktreeId;
407
408 /// Converts this file into a protobuf message.
409 fn to_proto(&self, cx: &App) -> rpc::proto::File;
410
    /// Returns whether Zed considers this to be a private file.
412 fn is_private(&self) -> bool;
413}
414
/// The file's storage status: whether it's stored (`Present`) and, if so, when it was last
/// modified. If the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for
/// new files.
419#[derive(Copy, Clone, Debug, PartialEq)]
420pub enum DiskState {
421 /// File created in Zed that has not been saved.
422 New,
423 /// File present on the filesystem.
424 Present { mtime: MTime },
425 /// Deleted file that was previously present.
426 Deleted,
427}
428
429impl DiskState {
430 /// Returns the file's last known modification time on disk.
431 pub fn mtime(self) -> Option<MTime> {
432 match self {
433 DiskState::New => None,
434 DiskState::Present { mtime } => Some(mtime),
435 DiskState::Deleted => None,
436 }
437 }
438
439 pub fn exists(&self) -> bool {
440 match self {
441 DiskState::New => false,
442 DiskState::Present { .. } => true,
443 DiskState::Deleted => false,
444 }
445 }
446}
447
448/// The file associated with a buffer, in the case where the file is on the local disk.
449pub trait LocalFile: File {
    /// Returns the absolute path of this file.
451 fn abs_path(&self, cx: &App) -> PathBuf;
452
453 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
454 fn load(&self, cx: &App) -> Task<Result<String>>;
455
456 /// Loads the file's contents from disk.
457 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
458}
459
460/// The auto-indent behavior associated with an editing operation.
461/// For some editing operations, each affected line of text has its
462/// indentation recomputed. For other operations, the entire block
463/// of edited text is adjusted uniformly.
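///
/// A minimal sketch, not from the original source, of requesting per-line
/// auto-indentation for an edit (assumes a `gpui` context `cx` and a buffer
/// whose language defines indentation rules):
///
/// ```ignore
/// buffer.edit(
///     [(0..0, "if x {\n    y();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```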
464#[derive(Clone, Debug)]
465pub enum AutoindentMode {
466 /// Indent each line of inserted text.
467 EachLine,
468 /// Apply the same indentation adjustment to all of the lines
469 /// in a given insertion.
470 Block {
471 /// The original indentation column of the first line of each
472 /// insertion, if it has been copied.
473 ///
474 /// Knowing this makes it possible to preserve the relative indentation
475 /// of every line in the insertion from when it was copied.
476 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every subsequent line
        /// of the insertion is shifted by `b - a` columns.
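        ///
        /// For example, if the copied block's first line was originally at indent
        /// column 4 (`a = 4`) and is auto-indented to column 8 (`b = 8`) at its
        /// destination, every line of the insertion is shifted right by
        /// `b - a = 4` columns, preserving the block's internal shape.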
480 original_indent_columns: Vec<Option<u32>>,
481 },
482}
483
484#[derive(Clone)]
485struct AutoindentRequest {
486 before_edit: BufferSnapshot,
487 entries: Vec<AutoindentRequestEntry>,
488 is_block_mode: bool,
489 ignore_empty_lines: bool,
490}
491
492#[derive(Debug, Clone)]
493struct AutoindentRequestEntry {
494 /// A range of the buffer whose indentation should be adjusted.
495 range: Range<Anchor>,
496 /// Whether or not these lines should be considered brand new, for the
497 /// purpose of auto-indent. When text is not new, its indentation will
498 /// only be adjusted if the suggested indentation level has *changed*
499 /// since the edit was made.
500 first_line_is_new: bool,
501 indent_size: IndentSize,
502 original_indent_column: Option<u32>,
503}
504
505#[derive(Debug)]
506struct IndentSuggestion {
507 basis_row: u32,
508 delta: Ordering,
509 within_error: bool,
510}
511
512struct BufferChunkHighlights<'a> {
513 captures: SyntaxMapCaptures<'a>,
514 next_capture: Option<SyntaxMapCapture<'a>>,
515 stack: Vec<(usize, HighlightId)>,
516 highlight_maps: Vec<HighlightMap>,
517}
518
519/// An iterator that yields chunks of a buffer's text, along with their
520/// syntax highlights and diagnostic status.
521pub struct BufferChunks<'a> {
522 buffer_snapshot: Option<&'a BufferSnapshot>,
523 range: Range<usize>,
524 chunks: text::Chunks<'a>,
525 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
526 error_depth: usize,
527 warning_depth: usize,
528 information_depth: usize,
529 hint_depth: usize,
530 unnecessary_depth: usize,
531 underline: bool,
532 highlights: Option<BufferChunkHighlights<'a>>,
533}
534
535/// A chunk of a buffer's text, along with its syntax highlight and
536/// diagnostic status.
537#[derive(Clone, Debug, Default)]
538pub struct Chunk<'a> {
539 /// The text of the chunk.
540 pub text: &'a str,
541 /// The syntax highlighting style of the chunk.
542 pub syntax_highlight_id: Option<HighlightId>,
543 /// The highlight style that has been applied to this chunk in
544 /// the editor.
545 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
547 pub diagnostic_severity: Option<DiagnosticSeverity>,
548 /// A bitset of which characters are tabs in this string.
549 pub tabs: u128,
550 /// Bitmap of character indices in this chunk
551 pub chars: u128,
552 /// Whether this chunk of text is marked as unnecessary.
553 pub is_unnecessary: bool,
554 /// Whether this chunk of text was originally a tab character.
555 pub is_tab: bool,
556 /// Whether this chunk of text was originally an inlay.
557 pub is_inlay: bool,
558 /// Whether to underline the corresponding text range in the editor.
559 pub underline: bool,
560}
561
562/// A set of edits to a given version of a buffer, computed asynchronously.
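///
/// A minimal sketch, not from the original source, of how a `Diff` is typically
/// produced and applied (assumes an async `gpui` context):
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// })?;
/// ```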
563#[derive(Debug)]
564pub struct Diff {
565 pub base_version: clock::Global,
566 pub line_ending: LineEnding,
567 pub edits: Vec<(Range<usize>, Arc<str>)>,
568}
569
570#[derive(Debug, Clone, Copy)]
571pub(crate) struct DiagnosticEndpoint {
572 offset: usize,
573 is_start: bool,
574 underline: bool,
575 severity: DiagnosticSeverity,
576 is_unnecessary: bool,
577}
578
579/// A class of characters, used for characterizing a run of text.
580#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
581pub enum CharKind {
582 /// Whitespace.
583 Whitespace,
584 /// Punctuation.
585 Punctuation,
586 /// Word.
587 Word,
588}
589
590/// Context for character classification within a specific scope.
591#[derive(Copy, Clone, Eq, PartialEq, Debug)]
592pub enum CharScopeContext {
593 /// Character classification for completion queries.
594 ///
595 /// This context treats certain characters as word constituents that would
596 /// normally be considered punctuation, such as '-' in Tailwind classes
597 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
598 Completion,
599 /// Character classification for linked edits.
600 ///
601 /// This context handles characters that should be treated as part of
602 /// identifiers during linked editing operations, such as '.' in JSX
603 /// component names like `<Animated.View>`.
604 LinkedEdit,
605}
606
/// A runnable is a set of data about a region that could be resolved into a task.
608pub struct Runnable {
609 pub tags: SmallVec<[RunnableTag; 1]>,
610 pub language: Arc<Language>,
611 pub buffer: BufferId,
612}
613
614#[derive(Default, Clone, Debug)]
615pub struct HighlightedText {
616 pub text: SharedString,
617 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
618}
619
620#[derive(Default, Debug)]
621struct HighlightedTextBuilder {
622 pub text: String,
623 highlights: Vec<(Range<usize>, HighlightStyle)>,
624}
625
626impl HighlightedText {
627 pub fn from_buffer_range<T: ToOffset>(
628 range: Range<T>,
629 snapshot: &text::BufferSnapshot,
630 syntax_snapshot: &SyntaxSnapshot,
631 override_style: Option<HighlightStyle>,
632 syntax_theme: &SyntaxTheme,
633 ) -> Self {
634 let mut highlighted_text = HighlightedTextBuilder::default();
635 highlighted_text.add_text_from_buffer_range(
636 range,
637 snapshot,
638 syntax_snapshot,
639 override_style,
640 syntax_theme,
641 );
642 highlighted_text.build()
643 }
644
645 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
646 gpui::StyledText::new(self.text.clone())
647 .with_default_highlights(default_style, self.highlights.iter().cloned())
648 }
649
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, and a boolean indicating whether more lines follow.
652 pub fn first_line_preview(self) -> (Self, bool) {
653 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
654 let first_line = &self.text[..newline_ix];
655
656 // Trim leading whitespace, unless an edit starts prior to it.
657 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
658 if let Some((first_highlight_range, _)) = self.highlights.first() {
659 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
660 }
661
662 let preview_text = &first_line[preview_start_ix..];
663 let preview_highlights = self
664 .highlights
665 .into_iter()
666 .skip_while(|(range, _)| range.end <= preview_start_ix)
667 .take_while(|(range, _)| range.start < newline_ix)
668 .filter_map(|(mut range, highlight)| {
669 range.start = range.start.saturating_sub(preview_start_ix);
670 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
671 if range.is_empty() {
672 None
673 } else {
674 Some((range, highlight))
675 }
676 });
677
678 let preview = Self {
679 text: SharedString::new(preview_text),
680 highlights: preview_highlights.collect(),
681 };
682
683 (preview, self.text.len() > newline_ix)
684 }
685}
686
687impl HighlightedTextBuilder {
688 pub fn build(self) -> HighlightedText {
689 HighlightedText {
690 text: self.text.into(),
691 highlights: self.highlights,
692 }
693 }
694
695 pub fn add_text_from_buffer_range<T: ToOffset>(
696 &mut self,
697 range: Range<T>,
698 snapshot: &text::BufferSnapshot,
699 syntax_snapshot: &SyntaxSnapshot,
700 override_style: Option<HighlightStyle>,
701 syntax_theme: &SyntaxTheme,
702 ) {
703 let range = range.to_offset(snapshot);
704 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
705 let start = self.text.len();
706 self.text.push_str(chunk.text);
707 let end = self.text.len();
708
709 if let Some(highlight_style) = chunk
710 .syntax_highlight_id
711 .and_then(|id| id.style(syntax_theme))
712 {
713 let highlight_style = override_style.map_or(highlight_style, |override_style| {
714 highlight_style.highlight(override_style)
715 });
716 self.highlights.push((start..end, highlight_style));
717 } else if let Some(override_style) = override_style {
718 self.highlights.push((start..end, override_style));
719 }
720 }
721 }
722
723 fn highlighted_chunks<'a>(
724 range: Range<usize>,
725 snapshot: &'a text::BufferSnapshot,
726 syntax_snapshot: &'a SyntaxSnapshot,
727 ) -> BufferChunks<'a> {
728 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
729 grammar
730 .highlights_config
731 .as_ref()
732 .map(|config| &config.query)
733 });
734
735 let highlight_maps = captures
736 .grammars()
737 .iter()
738 .map(|grammar| grammar.highlight_map())
739 .collect();
740
741 BufferChunks::new(
742 snapshot.as_rope(),
743 range,
744 Some((captures, highlight_maps)),
745 false,
746 None,
747 )
748 }
749}
750
751#[derive(Clone)]
752pub struct EditPreview {
753 old_snapshot: text::BufferSnapshot,
754 applied_edits_snapshot: text::BufferSnapshot,
755 syntax_snapshot: SyntaxSnapshot,
756}
757
758impl EditPreview {
759 pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first.start.to_point(&self.old_snapshot);
764 let old_end = last.end.to_point(&self.old_snapshot);
765 let new_end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 let start = Point::new(start.row.saturating_sub(3), 0);
771 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
772 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
773
774 Some(unified_diff(
775 &self
776 .old_snapshot
777 .text_for_range(start..old_end)
778 .collect::<String>(),
779 &self
780 .applied_edits_snapshot
781 .text_for_range(start..new_end)
782 .collect::<String>(),
783 ))
784 }
785
786 pub fn highlight_edits(
787 &self,
788 current_snapshot: &BufferSnapshot,
789 edits: &[(Range<Anchor>, impl AsRef<str>)],
790 include_deletions: bool,
791 cx: &App,
792 ) -> HighlightedText {
793 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
794 return HighlightedText::default();
795 };
796
797 let mut highlighted_text = HighlightedTextBuilder::default();
798
799 let visible_range_in_preview_snapshot =
800 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
801 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
802
803 let insertion_highlight_style = HighlightStyle {
804 background_color: Some(cx.theme().status().created_background),
805 ..Default::default()
806 };
807 let deletion_highlight_style = HighlightStyle {
808 background_color: Some(cx.theme().status().deleted_background),
809 ..Default::default()
810 };
811 let syntax_theme = cx.theme().syntax();
812
813 for (range, edit_text) in edits {
814 let edit_new_end_in_preview_snapshot = range
815 .end
816 .bias_right(&self.old_snapshot)
817 .to_offset(&self.applied_edits_snapshot);
818 let edit_start_in_preview_snapshot =
819 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
820
821 let unchanged_range_in_preview_snapshot =
822 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
823 if !unchanged_range_in_preview_snapshot.is_empty() {
824 highlighted_text.add_text_from_buffer_range(
825 unchanged_range_in_preview_snapshot,
826 &self.applied_edits_snapshot,
827 &self.syntax_snapshot,
828 None,
829 syntax_theme,
830 );
831 }
832
833 let range_in_current_snapshot = range.to_offset(current_snapshot);
834 if include_deletions && !range_in_current_snapshot.is_empty() {
835 highlighted_text.add_text_from_buffer_range(
836 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
839 Some(deletion_highlight_style),
840 syntax_theme,
841 );
842 }
843
844 if !edit_text.as_ref().is_empty() {
845 highlighted_text.add_text_from_buffer_range(
846 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
847 &self.applied_edits_snapshot,
848 &self.syntax_snapshot,
849 Some(insertion_highlight_style),
850 syntax_theme,
851 );
852 }
853
854 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
855 }
856
857 highlighted_text.add_text_from_buffer_range(
858 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
859 &self.applied_edits_snapshot,
860 &self.syntax_snapshot,
861 None,
862 syntax_theme,
863 );
864
865 highlighted_text.build()
866 }
867
868 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
869 cx.new(|cx| {
870 let mut buffer = Buffer::local_normalized(
871 self.applied_edits_snapshot.as_rope().clone(),
872 self.applied_edits_snapshot.line_ending(),
873 cx,
874 );
875 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
876 buffer
877 })
878 }
879
880 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
881 let (first, _) = edits.first()?;
882 let (last, _) = edits.last()?;
883
884 let start = first
885 .start
886 .bias_left(&self.old_snapshot)
887 .to_point(&self.applied_edits_snapshot);
888 let end = last
889 .end
890 .bias_right(&self.old_snapshot)
891 .to_point(&self.applied_edits_snapshot);
892
893 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
894 let range = Point::new(start.row, 0)
895 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
896
897 Some(range)
898 }
899}
900
901#[derive(Clone, Debug, PartialEq, Eq)]
902pub struct BracketMatch<T> {
903 pub open_range: Range<T>,
904 pub close_range: Range<T>,
905 pub newline_only: bool,
906 pub syntax_layer_depth: usize,
907 pub color_index: Option<usize>,
908}
909
910impl<T> BracketMatch<T> {
911 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
912 (self.open_range, self.close_range)
913 }
914}
915
916impl Buffer {
917 /// Create a new buffer with the given base text.
918 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
919 Self::build(
920 TextBuffer::new(
921 ReplicaId::LOCAL,
922 cx.entity_id().as_non_zero_u64().into(),
923 base_text.into(),
924 ),
925 None,
926 Capability::ReadWrite,
927 )
928 }
929
930 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
931 pub fn local_normalized(
932 base_text_normalized: Rope,
933 line_ending: LineEnding,
934 cx: &Context<Self>,
935 ) -> Self {
936 Self::build(
937 TextBuffer::new_normalized(
938 ReplicaId::LOCAL,
939 cx.entity_id().as_non_zero_u64().into(),
940 line_ending,
941 base_text_normalized,
942 ),
943 None,
944 Capability::ReadWrite,
945 )
946 }
947
948 /// Create a new buffer that is a replica of a remote buffer.
949 pub fn remote(
950 remote_id: BufferId,
951 replica_id: ReplicaId,
952 capability: Capability,
953 base_text: impl Into<String>,
954 ) -> Self {
955 Self::build(
956 TextBuffer::new(replica_id, remote_id, base_text.into()),
957 None,
958 capability,
959 )
960 }
961
962 /// Create a new buffer that is a replica of a remote buffer, populating its
963 /// state from the given protobuf message.
964 pub fn from_proto(
965 replica_id: ReplicaId,
966 capability: Capability,
967 message: proto::BufferState,
968 file: Option<Arc<dyn File>>,
969 ) -> Result<Self> {
970 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
971 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
972 let mut this = Self::build(buffer, file, capability);
973 this.text.set_line_ending(proto::deserialize_line_ending(
974 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
975 ));
976 this.saved_version = proto::deserialize_version(&message.saved_version);
977 this.saved_mtime = message.saved_mtime.map(|time| time.into());
978 Ok(this)
979 }
980
981 /// Serialize the buffer's state to a protobuf message.
982 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
983 proto::BufferState {
984 id: self.remote_id().into(),
985 file: self.file.as_ref().map(|f| f.to_proto(cx)),
986 base_text: self.base_text().to_string(),
987 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
988 saved_version: proto::serialize_version(&self.saved_version),
989 saved_mtime: self.saved_mtime.map(|time| time.into()),
990 }
991 }
992
993 /// Serialize as protobufs all of the changes to the buffer since the given version.
994 pub fn serialize_ops(
995 &self,
996 since: Option<clock::Global>,
997 cx: &App,
998 ) -> Task<Vec<proto::Operation>> {
999 let mut operations = Vec::new();
1000 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1001
1002 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1003 proto::serialize_operation(&Operation::UpdateSelections {
1004 selections: set.selections.clone(),
1005 lamport_timestamp: set.lamport_timestamp,
1006 line_mode: set.line_mode,
1007 cursor_shape: set.cursor_shape,
1008 })
1009 }));
1010
1011 for (server_id, diagnostics) in &self.diagnostics {
1012 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1013 lamport_timestamp: self.diagnostics_timestamp,
1014 server_id: *server_id,
1015 diagnostics: diagnostics.iter().cloned().collect(),
1016 }));
1017 }
1018
1019 for (server_id, completions) in &self.completion_triggers_per_language_server {
1020 operations.push(proto::serialize_operation(
1021 &Operation::UpdateCompletionTriggers {
1022 triggers: completions.iter().cloned().collect(),
1023 lamport_timestamp: self.completion_triggers_timestamp,
1024 server_id: *server_id,
1025 },
1026 ));
1027 }
1028
1029 let text_operations = self.text.operations().clone();
1030 cx.background_spawn(async move {
1031 let since = since.unwrap_or_default();
1032 operations.extend(
1033 text_operations
1034 .iter()
1035 .filter(|(_, op)| !since.observed(op.timestamp()))
1036 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1037 );
1038 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1039 operations
1040 })
1041 }
1042
1043 /// Assign a language to the buffer, returning the buffer.
1044 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1045 self.set_language_async(Some(language), cx);
1046 self
1047 }
1048
1049 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
1050 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1051 self.set_language(Some(language), cx);
1052 self
1053 }
1054
1055 /// Returns the [`Capability`] of this buffer.
1056 pub fn capability(&self) -> Capability {
1057 self.capability
1058 }
1059
1060 /// Whether this buffer can only be read.
1061 pub fn read_only(&self) -> bool {
1062 self.capability == Capability::ReadOnly
1063 }
1064
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1066 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1067 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1068 let snapshot = buffer.snapshot();
1069 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1070 let tree_sitter_data = TreeSitterData::new(snapshot);
1071 Self {
1072 saved_mtime,
1073 tree_sitter_data: Arc::new(tree_sitter_data),
1074 saved_version: buffer.version(),
1075 preview_version: buffer.version(),
1076 reload_task: None,
1077 transaction_depth: 0,
1078 was_dirty_before_starting_transaction: None,
1079 has_unsaved_edits: Cell::new((buffer.version(), false)),
1080 text: buffer,
1081 branch_state: None,
1082 file,
1083 capability,
1084 syntax_map,
1085 reparse: None,
1086 non_text_state_update_count: 0,
1087 sync_parse_timeout: Duration::from_millis(1),
1088 parse_status: watch::channel(ParseStatus::Idle),
1089 autoindent_requests: Default::default(),
1090 wait_for_autoindent_txs: Default::default(),
1091 pending_autoindent: Default::default(),
1092 language: None,
1093 remote_selections: Default::default(),
1094 diagnostics: Default::default(),
1095 diagnostics_timestamp: Lamport::MIN,
1096 completion_triggers: Default::default(),
1097 completion_triggers_per_language_server: Default::default(),
1098 completion_triggers_timestamp: Lamport::MIN,
1099 deferred_ops: OperationQueue::new(),
1100 has_conflict: false,
1101 change_bits: Default::default(),
1102 _subscriptions: Vec::new(),
1103 }
1104 }
1105
1106 pub fn build_snapshot(
1107 text: Rope,
1108 language: Option<Arc<Language>>,
1109 language_registry: Option<Arc<LanguageRegistry>>,
1110 cx: &mut App,
1111 ) -> impl Future<Output = BufferSnapshot> + use<> {
1112 let entity_id = cx.reserve_entity::<Self>().entity_id();
1113 let buffer_id = entity_id.as_non_zero_u64().into();
1114 async move {
1115 let text =
1116 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1117 .snapshot();
1118 let mut syntax = SyntaxMap::new(&text).snapshot();
1119 if let Some(language) = language.clone() {
1120 let language_registry = language_registry.clone();
1121 syntax.reparse(&text, language_registry, language);
1122 }
1123 let tree_sitter_data = TreeSitterData::new(text.clone());
1124 BufferSnapshot {
1125 text,
1126 syntax,
1127 file: None,
1128 diagnostics: Default::default(),
1129 remote_selections: Default::default(),
1130 tree_sitter_data: Arc::new(tree_sitter_data),
1131 language,
1132 non_text_state_update_count: 0,
1133 }
1134 }
1135 }
1136
1137 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1138 let entity_id = cx.reserve_entity::<Self>().entity_id();
1139 let buffer_id = entity_id.as_non_zero_u64().into();
1140 let text = TextBuffer::new_normalized(
1141 ReplicaId::LOCAL,
1142 buffer_id,
1143 Default::default(),
1144 Rope::new(),
1145 )
1146 .snapshot();
1147 let syntax = SyntaxMap::new(&text).snapshot();
1148 let tree_sitter_data = TreeSitterData::new(text.clone());
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(tree_sitter_data),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language: None,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 #[cfg(any(test, feature = "test-support"))]
1162 pub fn build_snapshot_sync(
1163 text: Rope,
1164 language: Option<Arc<Language>>,
1165 language_registry: Option<Arc<LanguageRegistry>>,
1166 cx: &mut App,
1167 ) -> BufferSnapshot {
1168 let entity_id = cx.reserve_entity::<Self>().entity_id();
1169 let buffer_id = entity_id.as_non_zero_u64().into();
1170 let text =
1171 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1172 .snapshot();
1173 let mut syntax = SyntaxMap::new(&text).snapshot();
1174 if let Some(language) = language.clone() {
1175 syntax.reparse(&text, language_registry, language);
1176 }
1177 let tree_sitter_data = TreeSitterData::new(text.clone());
1178 BufferSnapshot {
1179 text,
1180 syntax,
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 file: None,
1183 diagnostics: Default::default(),
1184 remote_selections: Default::default(),
1185 language,
1186 non_text_state_update_count: 0,
1187 }
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's current state. This is computationally
1191 /// cheap, and allows reading from the buffer on a background thread.
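    ///
    /// A minimal sketch, not from the original source, of reading a snapshot on
    /// a background thread (assumes a `gpui` app context `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     let line_count = snapshot.max_point().row + 1;
    ///     // ... perform read-only work with `snapshot` and `line_count` ...
    /// })
    /// .detach();
    /// ```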
1192 pub fn snapshot(&self) -> BufferSnapshot {
1193 let text = self.text.snapshot();
1194 let mut syntax_map = self.syntax_map.lock();
1195 syntax_map.interpolate(&text);
1196 let syntax = syntax_map.snapshot();
1197
1198 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1199 Arc::new(TreeSitterData::new(text.clone()))
1200 } else {
1201 self.tree_sitter_data.clone()
1202 };
1203
1204 BufferSnapshot {
1205 text,
1206 syntax,
1207 tree_sitter_data,
1208 file: self.file.clone(),
1209 remote_selections: self.remote_selections.clone(),
1210 diagnostics: self.diagnostics.clone(),
1211 language: self.language.clone(),
1212 non_text_state_update_count: self.non_text_state_update_count,
1213 }
1214 }
1215
1216 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1217 let this = cx.entity();
1218 cx.new(|cx| {
1219 let mut branch = Self {
1220 branch_state: Some(BufferBranchState {
1221 base_buffer: this.clone(),
1222 merged_operations: Default::default(),
1223 }),
1224 language: self.language.clone(),
1225 has_conflict: self.has_conflict,
1226 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1227 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1228 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1229 };
1230 if let Some(language_registry) = self.language_registry() {
1231 branch.set_language_registry(language_registry);
1232 }
1233
1234 // Reparse the branch buffer so that we get syntax highlighting immediately.
1235 branch.reparse(cx, true);
1236
1237 branch
1238 })
1239 }
1240
1241 pub fn preview_edits(
1242 &self,
1243 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1244 cx: &App,
1245 ) -> Task<EditPreview> {
1246 let registry = self.language_registry();
1247 let language = self.language().cloned();
1248 let old_snapshot = self.text.snapshot();
1249 let mut branch_buffer = self.text.branch();
1250 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1251 cx.background_spawn(async move {
1252 if !edits.is_empty() {
1253 if let Some(language) = language.clone() {
1254 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1255 }
1256
1257 branch_buffer.edit(edits.iter().cloned());
1258 let snapshot = branch_buffer.snapshot();
1259 syntax_snapshot.interpolate(&snapshot);
1260
1261 if let Some(language) = language {
1262 syntax_snapshot.reparse(&snapshot, registry, language);
1263 }
1264 }
1265 EditPreview {
1266 old_snapshot,
1267 applied_edits_snapshot: branch_buffer.snapshot(),
1268 syntax_snapshot,
1269 }
1270 })
1271 }
1272
1273 /// Applies all of the changes in this buffer that intersect any of the
1274 /// given `ranges` to its base buffer.
1275 ///
1276 /// If `ranges` is empty, then all changes will be applied. This buffer must
1277 /// be a branch buffer to call this method.
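    ///
    /// A minimal sketch, not from the original source, of branching, editing the
    /// branch, and merging every change back into the base buffer:
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new line\n")], None, cx);
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```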
1278 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1279 let Some(base_buffer) = self.base_buffer() else {
1280 debug_panic!("not a branch buffer");
1281 return;
1282 };
1283
1284 let mut ranges = if ranges.is_empty() {
1285 &[0..usize::MAX]
1286 } else {
1287 ranges.as_slice()
1288 }
1289 .iter()
1290 .peekable();
1291
1292 let mut edits = Vec::new();
1293 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1294 let mut is_included = false;
1295 while let Some(range) = ranges.peek() {
1296 if range.end < edit.new.start {
1297 ranges.next().unwrap();
1298 } else {
1299 if range.start <= edit.new.end {
1300 is_included = true;
1301 }
1302 break;
1303 }
1304 }
1305
1306 if is_included {
1307 edits.push((
1308 edit.old.clone(),
1309 self.text_for_range(edit.new.clone()).collect::<String>(),
1310 ));
1311 }
1312 }
1313
1314 let operation = base_buffer.update(cx, |base_buffer, cx| {
1315 // cx.emit(BufferEvent::DiffBaseChanged);
1316 base_buffer.edit(edits, None, cx)
1317 });
1318
1319 if let Some(operation) = operation
1320 && let Some(BufferBranchState {
1321 merged_operations, ..
1322 }) = &mut self.branch_state
1323 {
1324 merged_operations.push(operation);
1325 }
1326 }
1327
1328 fn on_base_buffer_event(
1329 &mut self,
1330 _: Entity<Buffer>,
1331 event: &BufferEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 let BufferEvent::Operation { operation, .. } = event else {
1335 return;
1336 };
1337 let Some(BufferBranchState {
1338 merged_operations, ..
1339 }) = &mut self.branch_state
1340 else {
1341 return;
1342 };
1343
1344 let mut operation_to_undo = None;
1345 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1346 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1347 {
1348 merged_operations.remove(ix);
1349 operation_to_undo = Some(operation.timestamp);
1350 }
1351
1352 self.apply_ops([operation.clone()], cx);
1353
1354 if let Some(timestamp) = operation_to_undo {
1355 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1356 self.undo_operations(counts, cx);
1357 }
1358 }
1359
1360 #[cfg(test)]
1361 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1362 &self.text
1363 }
1364
1365 /// Retrieve a snapshot of the buffer's raw text, without any
1366 /// language-related state like the syntax tree or diagnostics.
1367 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1368 self.text.snapshot()
1369 }
1370
1371 /// The file associated with the buffer, if any.
1372 pub fn file(&self) -> Option<&Arc<dyn File>> {
1373 self.file.as_ref()
1374 }
1375
1376 /// The version of the buffer that was last saved or reloaded from disk.
1377 pub fn saved_version(&self) -> &clock::Global {
1378 &self.saved_version
1379 }
1380
1381 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1382 pub fn saved_mtime(&self) -> Option<MTime> {
1383 self.saved_mtime
1384 }
1385
1386 /// Assign a language to the buffer.
1387 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1388 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1389 }
1390
1391 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1392 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1393 self.set_language_(language, true, cx);
1394 }
1395
1396 fn set_language_(
1397 &mut self,
1398 language: Option<Arc<Language>>,
1399 may_block: bool,
1400 cx: &mut Context<Self>,
1401 ) {
1402 self.non_text_state_update_count += 1;
1403 self.syntax_map.lock().clear(&self.text);
1404 let old_language = std::mem::replace(&mut self.language, language);
1405 self.was_changed();
1406 self.reparse(cx, may_block);
1407 let has_fresh_language =
1408 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1409 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1410 }
1411
1412 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1413 /// other languages if parts of the buffer are written in different languages.
1414 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1415 self.syntax_map
1416 .lock()
1417 .set_language_registry(language_registry);
1418 }
1419
1420 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1421 self.syntax_map.lock().language_registry()
1422 }
1423
1424 /// Assign the line ending type to the buffer.
1425 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1426 self.text.set_line_ending(line_ending);
1427
1428 let lamport_timestamp = self.text.lamport_clock.tick();
1429 self.send_operation(
1430 Operation::UpdateLineEnding {
1431 line_ending,
1432 lamport_timestamp,
1433 },
1434 true,
1435 cx,
1436 );
1437 }
1438
1439 /// Assign the buffer a new [`Capability`].
1440 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1441 if self.capability != capability {
1442 self.capability = capability;
1443 cx.emit(BufferEvent::CapabilityChanged)
1444 }
1445 }
1446
1447 /// This method is called to signal that the buffer has been saved.
1448 pub fn did_save(
1449 &mut self,
1450 version: clock::Global,
1451 mtime: Option<MTime>,
1452 cx: &mut Context<Self>,
1453 ) {
1454 self.saved_version = version.clone();
1455 self.has_unsaved_edits.set((version, false));
1456 self.has_conflict = false;
1457 self.saved_mtime = mtime;
1458 self.was_changed();
1459 cx.emit(BufferEvent::Saved);
1460 cx.notify();
1461 }
1462
1463 /// Reloads the contents of the buffer from disk.
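    ///
    /// A minimal sketch, not from the original source, of awaiting the reload's
    /// resulting transaction (assumes an async `gpui` context):
    ///
    /// ```ignore
    /// let reload_done = buffer.update(cx, |buffer, cx| buffer.reload(cx))?;
    /// let transaction = reload_done.await; // `Ok(Some(_))` if the reload edited the buffer
    /// ```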
1464 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1465 let (tx, rx) = futures::channel::oneshot::channel();
1466 let prev_version = self.text.version();
1467 self.reload_task = Some(cx.spawn(async move |this, cx| {
1468 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1469 let file = this.file.as_ref()?.as_local()?;
1470
1471 Some((file.disk_state().mtime(), file.load(cx)))
1472 })?
1473 else {
1474 return Ok(());
1475 };
1476
1477 let new_text = new_text.await?;
1478 let diff = this
1479 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1480 .await;
1481 this.update(cx, |this, cx| {
1482 if this.version() == diff.base_version {
1483 this.finalize_last_transaction();
1484 this.apply_diff(diff, cx);
1485 tx.send(this.finalize_last_transaction().cloned()).ok();
1486 this.has_conflict = false;
1487 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1488 } else {
1489 if !diff.edits.is_empty()
1490 || this
1491 .edits_since::<usize>(&diff.base_version)
1492 .next()
1493 .is_some()
1494 {
1495 this.has_conflict = true;
1496 }
1497
1498 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1499 }
1500
1501 this.reload_task.take();
1502 })
1503 }));
1504 rx
1505 }
1506
1507 /// This method is called to signal that the buffer has been reloaded.
1508 pub fn did_reload(
1509 &mut self,
1510 version: clock::Global,
1511 line_ending: LineEnding,
1512 mtime: Option<MTime>,
1513 cx: &mut Context<Self>,
1514 ) {
1515 self.saved_version = version;
1516 self.has_unsaved_edits
1517 .set((self.saved_version.clone(), false));
1518 self.text.set_line_ending(line_ending);
1519 self.saved_mtime = mtime;
1520 cx.emit(BufferEvent::Reloaded);
1521 cx.notify();
1522 }
1523
1524 /// Updates the [`File`] backing this buffer. This should be called when
1525 /// the file has changed or has been deleted.
1526 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1527 let was_dirty = self.is_dirty();
1528 let mut file_changed = false;
1529
1530 if let Some(old_file) = self.file.as_ref() {
1531 if new_file.path() != old_file.path() {
1532 file_changed = true;
1533 }
1534
1535 let old_state = old_file.disk_state();
1536 let new_state = new_file.disk_state();
1537 if old_state != new_state {
1538 file_changed = true;
1539 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1540 cx.emit(BufferEvent::ReloadNeeded)
1541 }
1542 }
1543 } else {
1544 file_changed = true;
1545 };
1546
1547 self.file = Some(new_file);
1548 if file_changed {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 if was_dirty != self.is_dirty() {
1552 cx.emit(BufferEvent::DirtyChanged);
1553 }
1554 cx.emit(BufferEvent::FileHandleChanged);
1555 cx.notify();
1556 }
1557 }
1558
1559 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1560 Some(self.branch_state.as_ref()?.base_buffer.clone())
1561 }
1562
1563 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1564 pub fn language(&self) -> Option<&Arc<Language>> {
1565 self.language.as_ref()
1566 }
1567
1568 /// Returns the [`Language`] at the given location.
1569 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1570 let offset = position.to_offset(self);
1571 let mut is_first = true;
1572 let start_anchor = self.anchor_before(offset);
1573 let end_anchor = self.anchor_after(offset);
1574 self.syntax_map
1575 .lock()
1576 .layers_for_range(offset..offset, &self.text, false)
1577 .filter(|layer| {
1578 if is_first {
1579 is_first = false;
1580 return true;
1581 }
1582
1583 layer
1584 .included_sub_ranges
1585 .map(|sub_ranges| {
1586 sub_ranges.iter().any(|sub_range| {
1587 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1588 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1589 !is_before_start && !is_after_end
1590 })
1591 })
1592 .unwrap_or(true)
1593 })
1594 .last()
1595 .map(|info| info.language.clone())
1596 .or_else(|| self.language.clone())
1597 }
1598
1599 /// Returns each [`Language`] for the active syntax layers at the given location.
1600 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1601 let offset = position.to_offset(self);
1602 let mut languages: Vec<Arc<Language>> = self
1603 .syntax_map
1604 .lock()
1605 .layers_for_range(offset..offset, &self.text, false)
1606 .map(|info| info.language.clone())
1607 .collect();
1608
1609 if languages.is_empty()
1610 && let Some(buffer_language) = self.language()
1611 {
1612 languages.push(buffer_language.clone());
1613 }
1614
1615 languages
1616 }
1617
1618 /// An integer version number that accounts for all updates besides
1619 /// the buffer's text itself (which is versioned via a version vector).
1620 pub fn non_text_state_update_count(&self) -> usize {
1621 self.non_text_state_update_count
1622 }
1623
1624 /// Whether the buffer is being parsed in the background.
1625 #[cfg(any(test, feature = "test-support"))]
1626 pub fn is_parsing(&self) -> bool {
1627 self.reparse.is_some()
1628 }
1629
1630 /// Indicates whether the buffer contains any regions that may be
1631 /// written in a language that hasn't been loaded yet.
1632 pub fn contains_unknown_injections(&self) -> bool {
1633 self.syntax_map.lock().contains_unknown_injections()
1634 }
1635
1636 #[cfg(any(test, feature = "test-support"))]
1637 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1638 self.sync_parse_timeout = timeout;
1639 }
1640
1641 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1642 match Arc::get_mut(&mut self.tree_sitter_data) {
1643 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1644 None => {
1645 let tree_sitter_data = TreeSitterData::new(snapshot);
1646 self.tree_sitter_data = Arc::new(tree_sitter_data)
1647 }
1648 }
1649 }
1650
1651 /// Called after an edit to synchronize the buffer's main parse tree with
1652 /// the buffer's new underlying state.
1653 ///
1654 /// Locks the syntax map and interpolates the edits since the last reparse
1655 /// into the foreground syntax tree.
1656 ///
1657 /// Then takes a stable snapshot of the syntax map before unlocking it.
1658 /// The snapshot with the interpolated edits is sent to a background thread,
1659 /// where we ask Tree-sitter to perform an incremental parse.
1660 ///
    /// Meanwhile, in the foreground, if `may_block` is true we block the main
    /// thread for up to 1ms waiting for the parse to complete; if it finishes
    /// within that window we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the result to the foreground parse state.
1669 ///
1670 /// If the buffer or grammar changed since the start of the background parse,
1671 /// initiate an additional reparse recursively. To avoid concurrent parses
1672 /// for the same buffer, we only initiate a new parse if we are not already
1673 /// parsing in the background.
1674 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1675 if self.text.version() != *self.tree_sitter_data.version() {
1676 self.invalidate_tree_sitter_data(self.text.snapshot());
1677 }
1678 if self.reparse.is_some() {
1679 return;
1680 }
1681 let language = if let Some(language) = self.language.clone() {
1682 language
1683 } else {
1684 return;
1685 };
1686
1687 let text = self.text_snapshot();
1688 let parsed_version = self.version();
1689
1690 let mut syntax_map = self.syntax_map.lock();
1691 syntax_map.interpolate(&text);
1692 let language_registry = syntax_map.language_registry();
1693 let mut syntax_snapshot = syntax_map.snapshot();
1694 drop(syntax_map);
1695
1696 let parse_task = cx.background_spawn({
1697 let language = language.clone();
1698 let language_registry = language_registry.clone();
1699 async move {
1700 syntax_snapshot.reparse(&text, language_registry, language);
1701 syntax_snapshot
1702 }
1703 });
1704
1705 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1706 if may_block {
1707 match cx
1708 .background_executor()
1709 .block_with_timeout(self.sync_parse_timeout, parse_task)
1710 {
1711 Ok(new_syntax_snapshot) => {
1712 self.did_finish_parsing(new_syntax_snapshot, cx);
1713 self.reparse = None;
1714 }
1715 Err(parse_task) => {
1716 self.reparse = Some(cx.spawn(async move |this, cx| {
1717 let new_syntax_map = cx.background_spawn(parse_task).await;
1718 this.update(cx, move |this, cx| {
1719 let grammar_changed = || {
1720 this.language.as_ref().is_none_or(|current_language| {
1721 !Arc::ptr_eq(&language, current_language)
1722 })
1723 };
1724 let language_registry_changed = || {
1725 new_syntax_map.contains_unknown_injections()
1726 && language_registry.is_some_and(|registry| {
1727 registry.version()
1728 != new_syntax_map.language_registry_version()
1729 })
1730 };
1731 let parse_again = this.version.changed_since(&parsed_version)
1732 || language_registry_changed()
1733 || grammar_changed();
1734 this.did_finish_parsing(new_syntax_map, cx);
1735 this.reparse = None;
1736 if parse_again {
1737 this.reparse(cx, false);
1738 }
1739 })
1740 .ok();
1741 }));
1742 }
1743 }
1744 } else {
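// Same completion handling as the timeout path above, but without blocking
// the main thread first.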
1745 self.reparse = Some(cx.spawn(async move |this, cx| {
1746 let new_syntax_map = cx.background_spawn(parse_task).await;
1747 this.update(cx, move |this, cx| {
1748 let grammar_changed = || {
1749 this.language.as_ref().is_none_or(|current_language| {
1750 !Arc::ptr_eq(&language, current_language)
1751 })
1752 };
1753 let language_registry_changed = || {
1754 new_syntax_map.contains_unknown_injections()
1755 && language_registry.is_some_and(|registry| {
1756 registry.version() != new_syntax_map.language_registry_version()
1757 })
1758 };
1759 let parse_again = this.version.changed_since(&parsed_version)
1760 || language_registry_changed()
1761 || grammar_changed();
1762 this.did_finish_parsing(new_syntax_map, cx);
1763 this.reparse = None;
1764 if parse_again {
1765 this.reparse(cx, false);
1766 }
1767 })
1768 .ok();
1769 }));
1770 }
1771 }
1772
1773 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1774 self.was_changed();
1775 self.non_text_state_update_count += 1;
1776 self.syntax_map.lock().did_parse(syntax_snapshot);
1777 self.request_autoindent(cx);
1778 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1779 if self.text.version() != *self.tree_sitter_data.version() {
1780 self.invalidate_tree_sitter_data(self.text.snapshot());
1781 }
1782 cx.emit(BufferEvent::Reparsed);
1783 cx.notify();
1784 }
1785
1786 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1787 self.parse_status.1.clone()
1788 }
1789
/// Waits until the buffer is no longer parsing.
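///
/// A usage sketch (illustrative, not compiled as a doctest); assumes `buffer`
/// is an `Entity<Buffer>` and the caller is in an async context:
///
/// ```ignore
/// let parsing_idle = buffer.read(cx).parsing_idle();
/// parsing_idle.await;
/// ```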
1791 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1792 let mut parse_status = self.parse_status();
1793 async move {
1794 while *parse_status.borrow() != ParseStatus::Idle {
1795 if parse_status.changed().await.is_err() {
1796 break;
1797 }
1798 }
1799 }
1800 }
1801
1802 /// Assign to the buffer a set of diagnostics created by a given language server.
1803 pub fn update_diagnostics(
1804 &mut self,
1805 server_id: LanguageServerId,
1806 diagnostics: DiagnosticSet,
1807 cx: &mut Context<Self>,
1808 ) {
1809 let lamport_timestamp = self.text.lamport_clock.tick();
1810 let op = Operation::UpdateDiagnostics {
1811 server_id,
1812 diagnostics: diagnostics.iter().cloned().collect(),
1813 lamport_timestamp,
1814 };
1815
1816 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1817 self.send_operation(op, true, cx);
1818 }
1819
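/// Returns the diagnostic entries produced by the given language server, or by
/// all servers when `for_server` is `None`.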
1820 pub fn buffer_diagnostics(
1821 &self,
1822 for_server: Option<LanguageServerId>,
1823 ) -> Vec<&DiagnosticEntry<Anchor>> {
1824 match for_server {
1825 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1826 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1827 Err(_) => Vec::new(),
1828 },
1829 None => self
1830 .diagnostics
1831 .iter()
1832 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1833 .collect(),
1834 }
1835 }
1836
1837 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1838 if let Some(indent_sizes) = self.compute_autoindents() {
1839 let indent_sizes = cx.background_spawn(indent_sizes);
1840 match cx
1841 .background_executor()
1842 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1843 {
1844 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1845 Err(indent_sizes) => {
1846 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1847 let indent_sizes = indent_sizes.await;
1848 this.update(cx, |this, cx| {
1849 this.apply_autoindents(indent_sizes, cx);
1850 })
1851 .ok();
1852 }));
1853 }
1854 }
1855 } else {
1856 self.autoindent_requests.clear();
1857 for tx in self.wait_for_autoindent_txs.drain(..) {
1858 tx.send(()).ok();
1859 }
1860 }
1861 }
1862
1863 fn compute_autoindents(
1864 &self,
1865 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1866 let max_rows_between_yields = 100;
1867 let snapshot = self.snapshot();
1868 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1869 return None;
1870 }
1871
1872 let autoindent_requests = self.autoindent_requests.clone();
1873 Some(async move {
1874 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1875 for request in autoindent_requests {
1876 // Resolve each edited range to its row in the current buffer and in the
1877 // buffer before this batch of edits.
1878 let mut row_ranges = Vec::new();
1879 let mut old_to_new_rows = BTreeMap::new();
1880 let mut language_indent_sizes_by_new_row = Vec::new();
1881 for entry in &request.entries {
1882 let position = entry.range.start;
1883 let new_row = position.to_point(&snapshot).row;
1884 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1885 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1886
1887 if !entry.first_line_is_new {
1888 let old_row = position.to_point(&request.before_edit).row;
1889 old_to_new_rows.insert(old_row, new_row);
1890 }
1891 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1892 }
1893
1894 // Build a map containing the suggested indentation for each of the edited lines
1895 // with respect to the state of the buffer before these edits. This map is keyed
1896 // by the rows for these lines in the current state of the buffer.
1897 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1898 let old_edited_ranges =
1899 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1900 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1901 let mut language_indent_size = IndentSize::default();
1902 for old_edited_range in old_edited_ranges {
1903 let suggestions = request
1904 .before_edit
1905 .suggest_autoindents(old_edited_range.clone())
1906 .into_iter()
1907 .flatten();
1908 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1909 if let Some(suggestion) = suggestion {
1910 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1911
1912 // Find the indent size based on the language for this row.
1913 while let Some((row, size)) = language_indent_sizes.peek() {
1914 if *row > new_row {
1915 break;
1916 }
1917 language_indent_size = *size;
1918 language_indent_sizes.next();
1919 }
1920
1921 let suggested_indent = old_to_new_rows
1922 .get(&suggestion.basis_row)
1923 .and_then(|from_row| {
1924 Some(old_suggestions.get(from_row).copied()?.0)
1925 })
1926 .unwrap_or_else(|| {
1927 request
1928 .before_edit
1929 .indent_size_for_line(suggestion.basis_row)
1930 })
1931 .with_delta(suggestion.delta, language_indent_size);
1932 old_suggestions
1933 .insert(new_row, (suggested_indent, suggestion.within_error));
1934 }
1935 }
1936 yield_now().await;
1937 }
1938
1939 // Compute new suggestions for each line, but only include them in the result
1940 // if they differ from the old suggestion for that line.
1941 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1942 let mut language_indent_size = IndentSize::default();
1943 for (row_range, original_indent_column) in row_ranges {
1944 let new_edited_row_range = if request.is_block_mode {
1945 row_range.start..row_range.start + 1
1946 } else {
1947 row_range.clone()
1948 };
1949
1950 let suggestions = snapshot
1951 .suggest_autoindents(new_edited_row_range.clone())
1952 .into_iter()
1953 .flatten();
1954 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1955 if let Some(suggestion) = suggestion {
1956 // Find the indent size based on the language for this row.
1957 while let Some((row, size)) = language_indent_sizes.peek() {
1958 if *row > new_row {
1959 break;
1960 }
1961 language_indent_size = *size;
1962 language_indent_sizes.next();
1963 }
1964
1965 let suggested_indent = indent_sizes
1966 .get(&suggestion.basis_row)
1967 .copied()
1968 .map(|e| e.0)
1969 .unwrap_or_else(|| {
1970 snapshot.indent_size_for_line(suggestion.basis_row)
1971 })
1972 .with_delta(suggestion.delta, language_indent_size);
1973
1974 if old_suggestions.get(&new_row).is_none_or(
1975 |(old_indentation, was_within_error)| {
1976 suggested_indent != *old_indentation
1977 && (!suggestion.within_error || *was_within_error)
1978 },
1979 ) {
1980 indent_sizes.insert(
1981 new_row,
1982 (suggested_indent, request.ignore_empty_lines),
1983 );
1984 }
1985 }
1986 }
1987
1988 if let (true, Some(original_indent_column)) =
1989 (request.is_block_mode, original_indent_column)
1990 {
1991 let new_indent =
1992 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1993 *indent
1994 } else {
1995 snapshot.indent_size_for_line(row_range.start)
1996 };
1997 let delta = new_indent.len as i64 - original_indent_column as i64;
1998 if delta != 0 {
1999 for row in row_range.skip(1) {
2000 indent_sizes.entry(row).or_insert_with(|| {
2001 let mut size = snapshot.indent_size_for_line(row);
2002 if size.kind == new_indent.kind {
2003 match delta.cmp(&0) {
2004 Ordering::Greater => size.len += delta as u32,
2005 Ordering::Less => {
2006 size.len = size.len.saturating_sub(-delta as u32)
2007 }
2008 Ordering::Equal => {}
2009 }
2010 }
2011 (size, request.ignore_empty_lines)
2012 });
2013 }
2014 }
2015 }
2016
2017 yield_now().await;
2018 }
2019 }
2020
2021 indent_sizes
2022 .into_iter()
2023 .filter_map(|(row, (indent, ignore_empty_lines))| {
2024 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2025 None
2026 } else {
2027 Some((row, indent))
2028 }
2029 })
2030 .collect()
2031 })
2032 }
2033
2034 fn apply_autoindents(
2035 &mut self,
2036 indent_sizes: BTreeMap<u32, IndentSize>,
2037 cx: &mut Context<Self>,
2038 ) {
2039 self.autoindent_requests.clear();
2040 for tx in self.wait_for_autoindent_txs.drain(..) {
2041 tx.send(()).ok();
2042 }
2043
2044 let edits: Vec<_> = indent_sizes
2045 .into_iter()
2046 .filter_map(|(row, indent_size)| {
2047 let current_size = indent_size_for_line(self, row);
2048 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2049 })
2050 .collect();
2051
2052 let preserve_preview = self.preserve_preview();
2053 self.edit(edits, None, cx);
2054 if preserve_preview {
2055 self.refresh_preview();
2056 }
2057 }
2058
2059 /// Create a minimal edit that will cause the given row to be indented
2060 /// with the given size. After applying this edit, the length of the line
2061 /// will always be at least `new_size.len`.
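///
/// An illustrative sketch (marked `ignore`, so it is not compiled as a
/// doctest); assumes `IndentSize::spaces(n)` yields a space-kind indent:
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     0,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))
/// );
/// ```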
2062 pub fn edit_for_indent_size_adjustment(
2063 row: u32,
2064 current_size: IndentSize,
2065 new_size: IndentSize,
2066 ) -> Option<(Range<Point>, String)> {
2067 if new_size.kind == current_size.kind {
match new_size.len.cmp(&current_size.len) {
2069 Ordering::Greater => {
2070 let point = Point::new(row, 0);
2071 Some((
2072 point..point,
2073 iter::repeat(new_size.char())
2074 .take((new_size.len - current_size.len) as usize)
2075 .collect::<String>(),
2076 ))
2077 }
2078
2079 Ordering::Less => Some((
2080 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2081 String::new(),
2082 )),
2083
2084 Ordering::Equal => None,
2085 }
2086 } else {
2087 Some((
2088 Point::new(row, 0)..Point::new(row, current_size.len),
2089 iter::repeat(new_size.char())
2090 .take(new_size.len as usize)
2091 .collect::<String>(),
2092 ))
2093 }
2094 }
2095
2096 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2097 /// and the given new text.
2098 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2099 let old_text = self.as_rope().clone();
2100 let base_version = self.version();
2101 cx.background_executor()
2102 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
2103 let old_text = old_text.to_string();
2104 let line_ending = LineEnding::detect(&new_text);
2105 LineEnding::normalize(&mut new_text);
2106 let edits = text_diff(&old_text, &new_text);
2107 Diff {
2108 base_version,
2109 line_ending,
2110 edits,
2111 }
2112 })
2113 }
2114
2115 /// Spawns a background task that searches the buffer for any whitespace
/// at the ends of lines, and returns a `Diff` that removes that whitespace.
2117 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2118 let old_text = self.as_rope().clone();
2119 let line_ending = self.line_ending();
2120 let base_version = self.version();
2121 cx.background_spawn(async move {
2122 let ranges = trailing_whitespace_ranges(&old_text);
2123 let empty = Arc::<str>::from("");
2124 Diff {
2125 base_version,
2126 line_ending,
2127 edits: ranges
2128 .into_iter()
2129 .map(|range| (range, empty.clone()))
2130 .collect(),
2131 }
2132 })
2133 }
2134
2135 /// Ensures that the buffer ends with a single newline character, and
/// no other whitespace. Does nothing if the buffer is empty.
2137 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2138 let len = self.len();
2139 if len == 0 {
2140 return;
2141 }
2142 let mut offset = len;
2143 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2144 let non_whitespace_len = chunk
2145 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2146 .len();
2147 offset -= chunk.len();
2148 offset += non_whitespace_len;
2149 if non_whitespace_len != 0 {
2150 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2151 return;
2152 }
2153 break;
2154 }
2155 }
2156 self.edit([(offset..len, "\n")], None, cx);
2157 }
2158
2159 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2160 /// calculated, then adjust the diff to account for those changes, and discard any
2161 /// parts of the diff that conflict with those changes.
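///
/// A usage sketch combining [`Buffer::diff`] with this method (illustrative,
/// not compiled as a doctest); assumes `buffer` is an `Entity<Buffer>` and
/// `cx` is an async-capable context:
///
/// ```ignore
/// let diff_task = buffer.read(cx).diff(new_text, cx);
/// cx.spawn(async move |cx| {
///     let diff = diff_task.await;
///     buffer
///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
///         .ok();
/// })
/// .detach();
/// ```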
2162 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2163 let snapshot = self.snapshot();
2164 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2165 let mut delta = 0;
2166 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2167 while let Some(edit_since) = edits_since.peek() {
2168 // If the edit occurs after a diff hunk, then it does not
2169 // affect that hunk.
2170 if edit_since.old.start > range.end {
2171 break;
2172 }
2173 // If the edit precedes the diff hunk, then adjust the hunk
2174 // to reflect the edit.
2175 else if edit_since.old.end < range.start {
2176 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2177 edits_since.next();
2178 }
2179 // If the edit intersects a diff hunk, then discard that hunk.
2180 else {
2181 return None;
2182 }
2183 }
2184
2185 let start = (range.start as i64 + delta) as usize;
2186 let end = (range.end as i64 + delta) as usize;
2187 Some((start..end, new_text))
2188 });
2189
2190 self.start_transaction();
2191 self.text.set_line_ending(diff.line_ending);
2192 self.edit(adjusted_edits, None, cx);
2193 self.end_transaction(cx)
2194 }
2195
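/// Reports whether the buffer's content differs from its last saved version,
/// caching the answer for the current version so repeated calls avoid
/// rescanning the edit history.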
2196 pub fn has_unsaved_edits(&self) -> bool {
2197 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2198
2199 if last_version == self.version {
2200 self.has_unsaved_edits
2201 .set((last_version, has_unsaved_edits));
2202 return has_unsaved_edits;
2203 }
2204
2205 let has_edits = self.has_edits_since(&self.saved_version);
2206 self.has_unsaved_edits
2207 .set((self.version.clone(), has_edits));
2208 has_edits
2209 }
2210
2211 /// Checks if the buffer has unsaved changes.
2212 pub fn is_dirty(&self) -> bool {
2213 if self.capability == Capability::ReadOnly {
2214 return false;
2215 }
2216 if self.has_conflict {
2217 return true;
2218 }
2219 match self.file.as_ref().map(|f| f.disk_state()) {
2220 Some(DiskState::New) | Some(DiskState::Deleted) => {
2221 !self.is_empty() && self.has_unsaved_edits()
2222 }
2223 _ => self.has_unsaved_edits(),
2224 }
2225 }
2226
2227 /// Marks the buffer as having a conflict regardless of current buffer state.
2228 pub fn set_conflict(&mut self) {
2229 self.has_conflict = true;
2230 }
2231
2232 /// Checks if the buffer and its file have both changed since the buffer
2233 /// was last saved or reloaded.
2234 pub fn has_conflict(&self) -> bool {
2235 if self.has_conflict {
2236 return true;
2237 }
2238 let Some(file) = self.file.as_ref() else {
2239 return false;
2240 };
2241 match file.disk_state() {
2242 DiskState::New => false,
2243 DiskState::Present { mtime } => match self.saved_mtime {
2244 Some(saved_mtime) => {
2245 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2246 }
2247 None => true,
2248 },
2249 DiskState::Deleted => false,
2250 }
2251 }
2252
2253 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2254 pub fn subscribe(&mut self) -> Subscription<usize> {
2255 self.text.subscribe()
2256 }
2257
2258 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2259 ///
2260 /// This allows downstream code to check if the buffer's text has changed without
/// waiting for an effect cycle, which would be required if using events.
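///
/// A minimal sketch (not compiled as a doctest); the caller keeps the strong
/// `Rc` alive for as long as it wants to observe changes:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let edited = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&edited));
/// // ...after any subsequent edit to the buffer...
/// assert!(edited.get());
/// ```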
2262 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2263 if let Err(ix) = self
2264 .change_bits
2265 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2266 {
2267 self.change_bits.insert(ix, bit);
2268 }
2269 }
2270
2271 /// Set the change bit for all "listeners".
2272 fn was_changed(&mut self) {
2273 self.change_bits.retain(|change_bit| {
2274 change_bit
2275 .upgrade()
2276 .inspect(|bit| {
2277 _ = bit.replace(true);
2278 })
2279 .is_some()
2280 });
2281 }
2282
2283 /// Starts a transaction, if one is not already in-progress. When undoing or
2284 /// redoing edits, all of the edits performed within a transaction are undone
2285 /// or redone together.
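///
/// A usage sketch (illustrative, not compiled as a doctest); assumes this runs
/// inside an update of the buffer entity, so `cx` is a `Context<Buffer>`:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(12..12, "}\n")], None, cx);
/// buffer.end_transaction(cx); // both edits now undo and redo as one unit
/// ```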
2286 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2287 self.start_transaction_at(Instant::now())
2288 }
2289
2290 /// Starts a transaction, providing the current time. Subsequent transactions
2291 /// that occur within a short period of time will be grouped together. This
2292 /// is controlled by the buffer's undo grouping duration.
2293 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2294 self.transaction_depth += 1;
2295 if self.was_dirty_before_starting_transaction.is_none() {
2296 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2297 }
2298 self.text.start_transaction_at(now)
2299 }
2300
2301 /// Terminates the current transaction, if this is the outermost transaction.
2302 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2303 self.end_transaction_at(Instant::now(), cx)
2304 }
2305
2306 /// Terminates the current transaction, providing the current time. Subsequent transactions
2307 /// that occur within a short period of time will be grouped together. This
2308 /// is controlled by the buffer's undo grouping duration.
2309 pub fn end_transaction_at(
2310 &mut self,
2311 now: Instant,
2312 cx: &mut Context<Self>,
2313 ) -> Option<TransactionId> {
2314 assert!(self.transaction_depth > 0);
2315 self.transaction_depth -= 1;
2316 let was_dirty = if self.transaction_depth == 0 {
2317 self.was_dirty_before_starting_transaction.take().unwrap()
2318 } else {
2319 false
2320 };
2321 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2322 self.did_edit(&start_version, was_dirty, cx);
2323 Some(transaction_id)
2324 } else {
2325 None
2326 }
2327 }
2328
2329 /// Manually add a transaction to the buffer's undo history.
2330 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2331 self.text.push_transaction(transaction, now);
2332 }
2333
2334 /// Differs from `push_transaction` in that it does not clear the redo
2335 /// stack. Intended to be used to create a parent transaction to merge
2336 /// potential child transactions into.
2337 ///
2338 /// The caller is responsible for removing it from the undo history using
2339 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2340 /// are merged into this transaction, the caller is responsible for ensuring
2341 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2342 /// cleared is to create transactions with the usual `start_transaction` and
2343 /// `end_transaction` methods and merging the resulting transactions into
/// the transaction created by this method.
2345 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2346 self.text.push_empty_transaction(now)
2347 }
2348
2349 /// Prevent the last transaction from being grouped with any subsequent transactions,
/// even if they occur within the buffer's undo grouping duration.
2351 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2352 self.text.finalize_last_transaction()
2353 }
2354
2355 /// Manually group all changes since a given transaction.
2356 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2357 self.text.group_until_transaction(transaction_id);
2358 }
2359
/// Manually remove a transaction from the buffer's undo history.
2361 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2362 self.text.forget_transaction(transaction_id)
2363 }
2364
/// Retrieve a transaction from the buffer's undo history.
2366 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2367 self.text.get_transaction(transaction_id)
2368 }
2369
2370 /// Manually merge two transactions in the buffer's undo history.
2371 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2372 self.text.merge_transactions(transaction, destination);
2373 }
2374
2375 /// Waits for the buffer to receive operations with the given timestamps.
2376 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2377 &mut self,
2378 edit_ids: It,
2379 ) -> impl Future<Output = Result<()>> + use<It> {
2380 self.text.wait_for_edits(edit_ids)
2381 }
2382
2383 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2384 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2385 &mut self,
2386 anchors: It,
2387 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2388 self.text.wait_for_anchors(anchors)
2389 }
2390
2391 /// Waits for the buffer to receive operations up to the given version.
2392 pub fn wait_for_version(
2393 &mut self,
2394 version: clock::Global,
2395 ) -> impl Future<Output = Result<()>> + use<> {
2396 self.text.wait_for_version(version)
2397 }
2398
2399 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
/// [`Buffer::wait_for_anchors`] to resolve with an error.
2401 pub fn give_up_waiting(&mut self) {
2402 self.text.give_up_waiting();
2403 }
2404
2405 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2406 let mut rx = None;
2407 if !self.autoindent_requests.is_empty() {
2408 let channel = oneshot::channel();
2409 self.wait_for_autoindent_txs.push(channel.0);
2410 rx = Some(channel.1);
2411 }
2412 rx
2413 }
2414
/// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2416 pub fn set_active_selections(
2417 &mut self,
2418 selections: Arc<[Selection<Anchor>]>,
2419 line_mode: bool,
2420 cursor_shape: CursorShape,
2421 cx: &mut Context<Self>,
2422 ) {
2423 let lamport_timestamp = self.text.lamport_clock.tick();
2424 self.remote_selections.insert(
2425 self.text.replica_id(),
2426 SelectionSet {
2427 selections: selections.clone(),
2428 lamport_timestamp,
2429 line_mode,
2430 cursor_shape,
2431 },
2432 );
2433 self.send_operation(
2434 Operation::UpdateSelections {
2435 selections,
2436 line_mode,
2437 lamport_timestamp,
2438 cursor_shape,
2439 },
2440 true,
2441 cx,
2442 );
2443 self.non_text_state_update_count += 1;
2444 cx.notify();
2445 }
2446
2447 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2448 /// this replica.
2449 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2450 if self
2451 .remote_selections
2452 .get(&self.text.replica_id())
2453 .is_none_or(|set| !set.selections.is_empty())
2454 {
2455 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2456 }
2457 }
2458
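/// Stores a set of selections attributed to the agent replica. Unlike
/// [`Buffer::set_active_selections`], this does not broadcast an operation to
/// other replicas.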
2459 pub fn set_agent_selections(
2460 &mut self,
2461 selections: Arc<[Selection<Anchor>]>,
2462 line_mode: bool,
2463 cursor_shape: CursorShape,
2464 cx: &mut Context<Self>,
2465 ) {
2466 let lamport_timestamp = self.text.lamport_clock.tick();
2467 self.remote_selections.insert(
2468 ReplicaId::AGENT,
2469 SelectionSet {
2470 selections,
2471 lamport_timestamp,
2472 line_mode,
2473 cursor_shape,
2474 },
2475 );
2476 self.non_text_state_update_count += 1;
2477 cx.notify();
2478 }
2479
2480 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2481 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2482 }
2483
2484 /// Replaces the buffer's entire text.
2485 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2486 where
2487 T: Into<Arc<str>>,
2488 {
2489 self.autoindent_requests.clear();
2490 self.edit([(0..self.len(), text)], None, cx)
2491 }
2492
2493 /// Appends the given text to the end of the buffer.
2494 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2495 where
2496 T: Into<Arc<str>>,
2497 {
2498 self.edit([(self.len()..self.len(), text)], None, cx)
2499 }
2500
2501 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2502 /// delete, and a string of text to insert at that location.
2503 ///
2504 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2505 /// request for the edited ranges, which will be processed when the buffer finishes
2506 /// parsing.
2507 ///
/// Parsing takes place at the end of a transaction, and may run synchronously
/// or asynchronously, depending on the changes.
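///
/// A minimal sketch (illustrative, not compiled as a doctest); assumes this
/// runs inside an update of the buffer entity:
///
/// ```ignore
/// // Replace the first five bytes and auto-indent each edited line.
/// buffer.edit([(0..5, "hello")], Some(AutoindentMode::EachLine), cx);
/// ```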
2510 pub fn edit<I, S, T>(
2511 &mut self,
2512 edits_iter: I,
2513 autoindent_mode: Option<AutoindentMode>,
2514 cx: &mut Context<Self>,
2515 ) -> Option<clock::Lamport>
2516 where
2517 I: IntoIterator<Item = (Range<S>, T)>,
2518 S: ToOffset,
2519 T: Into<Arc<str>>,
2520 {
2521 // Skip invalid edits and coalesce contiguous ones.
2522 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2523
2524 for (range, new_text) in edits_iter {
2525 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2526
2527 if range.start > range.end {
2528 mem::swap(&mut range.start, &mut range.end);
2529 }
2530 let new_text = new_text.into();
2531 if !new_text.is_empty() || !range.is_empty() {
2532 if let Some((prev_range, prev_text)) = edits.last_mut()
2533 && prev_range.end >= range.start
2534 {
2535 prev_range.end = cmp::max(prev_range.end, range.end);
2536 *prev_text = format!("{prev_text}{new_text}").into();
2537 } else {
2538 edits.push((range, new_text));
2539 }
2540 }
2541 }
2542 if edits.is_empty() {
2543 return None;
2544 }
2545
2546 self.start_transaction();
2547 self.pending_autoindent.take();
2548 let autoindent_request = autoindent_mode
2549 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2550
2551 let edit_operation = self.text.edit(edits.iter().cloned());
2552 let edit_id = edit_operation.timestamp();
2553
2554 if let Some((before_edit, mode)) = autoindent_request {
2555 let mut delta = 0isize;
2556 let mut previous_setting = None;
2557 let entries: Vec<_> = edits
2558 .into_iter()
2559 .enumerate()
2560 .zip(&edit_operation.as_edit().unwrap().new_text)
2561 .filter(|((_, (range, _)), _)| {
2562 let language = before_edit.language_at(range.start);
2563 let language_id = language.map(|l| l.id());
2564 if let Some((cached_language_id, auto_indent)) = previous_setting
2565 && cached_language_id == language_id
2566 {
2567 auto_indent
2568 } else {
2569 // The auto-indent setting is not present in editorconfigs, hence
2570 // we can avoid passing the file here.
2571 let auto_indent =
2572 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2573 previous_setting = Some((language_id, auto_indent));
2574 auto_indent
2575 }
2576 })
2577 .map(|((ix, (range, _)), new_text)| {
2578 let new_text_length = new_text.len();
2579 let old_start = range.start.to_point(&before_edit);
2580 let new_start = (delta + range.start as isize) as usize;
2581 let range_len = range.end - range.start;
2582 delta += new_text_length as isize - range_len as isize;
2583
2584 // Decide what range of the insertion to auto-indent, and whether
2585 // the first line of the insertion should be considered a newly-inserted line
2586 // or an edit to an existing line.
2587 let mut range_of_insertion_to_indent = 0..new_text_length;
2588 let mut first_line_is_new = true;
2589
2590 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2591 let old_line_end = before_edit.line_len(old_start.row);
2592
2593 if old_start.column > old_line_start {
2594 first_line_is_new = false;
2595 }
2596
2597 if !new_text.contains('\n')
2598 && (old_start.column + (range_len as u32) < old_line_end
2599 || old_line_end == old_line_start)
2600 {
2601 first_line_is_new = false;
2602 }
2603
2604 // When inserting text starting with a newline, avoid auto-indenting the
2605 // previous line.
2606 if new_text.starts_with('\n') {
2607 range_of_insertion_to_indent.start += 1;
2608 first_line_is_new = true;
2609 }
2610
2611 let mut original_indent_column = None;
2612 if let AutoindentMode::Block {
2613 original_indent_columns,
2614 } = &mode
2615 {
2616 original_indent_column = Some(if new_text.starts_with('\n') {
2617 indent_size_for_text(
2618 new_text[range_of_insertion_to_indent.clone()].chars(),
2619 )
2620 .len
2621 } else {
2622 original_indent_columns
2623 .get(ix)
2624 .copied()
2625 .flatten()
2626 .unwrap_or_else(|| {
2627 indent_size_for_text(
2628 new_text[range_of_insertion_to_indent.clone()].chars(),
2629 )
2630 .len
2631 })
2632 });
2633
2634 // Avoid auto-indenting the line after the edit.
2635 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2636 range_of_insertion_to_indent.end -= 1;
2637 }
2638 }
2639
2640 AutoindentRequestEntry {
2641 first_line_is_new,
2642 original_indent_column,
2643 indent_size: before_edit.language_indent_size_at(range.start, cx),
2644 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2645 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2646 }
2647 })
2648 .collect();
2649
2650 if !entries.is_empty() {
2651 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2652 before_edit,
2653 entries,
2654 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2655 ignore_empty_lines: false,
2656 }));
2657 }
2658 }
2659
2660 self.end_transaction(cx);
2661 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2662 Some(edit_id)
2663 }
2664
2665 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2666 self.was_changed();
2667
2668 if self.edits_since::<usize>(old_version).next().is_none() {
2669 return;
2670 }
2671
2672 self.reparse(cx, true);
2673 cx.emit(BufferEvent::Edited);
2674 if was_dirty != self.is_dirty() {
2675 cx.emit(BufferEvent::DirtyChanged);
2676 }
2677 cx.notify();
2678 }
2679
2680 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2681 where
2682 I: IntoIterator<Item = Range<T>>,
2683 T: ToOffset + Copy,
2684 {
2685 let before_edit = self.snapshot();
2686 let entries = ranges
2687 .into_iter()
2688 .map(|range| AutoindentRequestEntry {
2689 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2690 first_line_is_new: true,
2691 indent_size: before_edit.language_indent_size_at(range.start, cx),
2692 original_indent_column: None,
2693 })
2694 .collect();
2695 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2696 before_edit,
2697 entries,
2698 is_block_mode: false,
2699 ignore_empty_lines: true,
2700 }));
2701 self.request_autoindent(cx);
2702 }
2703
/// Inserts newlines at the given position to create an empty line, returning the start of the new line.
/// You can also request the insertion of empty lines above and below the line starting at the returned point.
2706 pub fn insert_empty_line(
2707 &mut self,
2708 position: impl ToPoint,
2709 space_above: bool,
2710 space_below: bool,
2711 cx: &mut Context<Self>,
2712 ) -> Point {
2713 let mut position = position.to_point(self);
2714
2715 self.start_transaction();
2716
2717 self.edit(
2718 [(position..position, "\n")],
2719 Some(AutoindentMode::EachLine),
2720 cx,
2721 );
2722
2723 if position.column > 0 {
2724 position += Point::new(1, 0);
2725 }
2726
2727 if !self.is_line_blank(position.row) {
2728 self.edit(
2729 [(position..position, "\n")],
2730 Some(AutoindentMode::EachLine),
2731 cx,
2732 );
2733 }
2734
2735 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2736 self.edit(
2737 [(position..position, "\n")],
2738 Some(AutoindentMode::EachLine),
2739 cx,
2740 );
2741 position.row += 1;
2742 }
2743
2744 if space_below
2745 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2746 {
2747 self.edit(
2748 [(position..position, "\n")],
2749 Some(AutoindentMode::EachLine),
2750 cx,
2751 );
2752 }
2753
2754 self.end_transaction(cx);
2755
2756 position
2757 }
2758
2759 /// Applies the given remote operations to the buffer.
2760 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2761 self.pending_autoindent.take();
2762 let was_dirty = self.is_dirty();
2763 let old_version = self.version.clone();
2764 let mut deferred_ops = Vec::new();
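// Text operations are applied in bulk below. Non-text operations are applied
// immediately when their anchors can already be resolved, and deferred
// otherwise.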
2765 let buffer_ops = ops
2766 .into_iter()
2767 .filter_map(|op| match op {
2768 Operation::Buffer(op) => Some(op),
2769 _ => {
2770 if self.can_apply_op(&op) {
2771 self.apply_op(op, cx);
2772 } else {
2773 deferred_ops.push(op);
2774 }
2775 None
2776 }
2777 })
2778 .collect::<Vec<_>>();
2779 for operation in buffer_ops.iter() {
2780 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2781 }
2782 self.text.apply_ops(buffer_ops);
2783 self.deferred_ops.insert(deferred_ops);
2784 self.flush_deferred_ops(cx);
2785 self.did_edit(&old_version, was_dirty, cx);
2786 // Notify independently of whether the buffer was edited as the operations could include a
2787 // selection update.
2788 cx.notify();
2789 }
2790
2791 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2792 let mut deferred_ops = Vec::new();
2793 for op in self.deferred_ops.drain().iter().cloned() {
2794 if self.can_apply_op(&op) {
2795 self.apply_op(op, cx);
2796 } else {
2797 deferred_ops.push(op);
2798 }
2799 }
2800 self.deferred_ops.insert(deferred_ops);
2801 }
2802
2803 pub fn has_deferred_ops(&self) -> bool {
2804 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2805 }
2806
2807 fn can_apply_op(&self, operation: &Operation) -> bool {
2808 match operation {
2809 Operation::Buffer(_) => {
2810 unreachable!("buffer operations should never be applied at this layer")
2811 }
2812 Operation::UpdateDiagnostics {
2813 diagnostics: diagnostic_set,
2814 ..
2815 } => diagnostic_set.iter().all(|diagnostic| {
2816 self.text.can_resolve(&diagnostic.range.start)
2817 && self.text.can_resolve(&diagnostic.range.end)
2818 }),
2819 Operation::UpdateSelections { selections, .. } => selections
2820 .iter()
2821 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2822 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2823 }
2824 }
2825
2826 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2827 match operation {
2828 Operation::Buffer(_) => {
2829 unreachable!("buffer operations should never be applied at this layer")
2830 }
2831 Operation::UpdateDiagnostics {
2832 server_id,
2833 diagnostics: diagnostic_set,
2834 lamport_timestamp,
2835 } => {
2836 let snapshot = self.snapshot();
2837 self.apply_diagnostic_update(
2838 server_id,
2839 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2840 lamport_timestamp,
2841 cx,
2842 );
2843 }
2844 Operation::UpdateSelections {
2845 selections,
2846 lamport_timestamp,
2847 line_mode,
2848 cursor_shape,
2849 } => {
2850 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2851 && set.lamport_timestamp > lamport_timestamp
2852 {
2853 return;
2854 }
2855
2856 self.remote_selections.insert(
2857 lamport_timestamp.replica_id,
2858 SelectionSet {
2859 selections,
2860 lamport_timestamp,
2861 line_mode,
2862 cursor_shape,
2863 },
2864 );
2865 self.text.lamport_clock.observe(lamport_timestamp);
2866 self.non_text_state_update_count += 1;
2867 }
2868 Operation::UpdateCompletionTriggers {
2869 triggers,
2870 lamport_timestamp,
2871 server_id,
2872 } => {
2873 if triggers.is_empty() {
2874 self.completion_triggers_per_language_server
2875 .remove(&server_id);
2876 self.completion_triggers = self
2877 .completion_triggers_per_language_server
2878 .values()
2879 .flat_map(|triggers| triggers.iter().cloned())
2880 .collect();
2881 } else {
2882 self.completion_triggers_per_language_server
2883 .insert(server_id, triggers.iter().cloned().collect());
2884 self.completion_triggers.extend(triggers);
2885 }
2886 self.text.lamport_clock.observe(lamport_timestamp);
2887 }
2888 Operation::UpdateLineEnding {
2889 line_ending,
2890 lamport_timestamp,
2891 } => {
2892 self.text.set_line_ending(line_ending);
2893 self.text.lamport_clock.observe(lamport_timestamp);
2894 }
2895 }
2896 }
2897
2898 fn apply_diagnostic_update(
2899 &mut self,
2900 server_id: LanguageServerId,
2901 diagnostics: DiagnosticSet,
2902 lamport_timestamp: clock::Lamport,
2903 cx: &mut Context<Self>,
2904 ) {
2905 if lamport_timestamp > self.diagnostics_timestamp {
2906 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2907 if diagnostics.is_empty() {
2908 if let Ok(ix) = ix {
2909 self.diagnostics.remove(ix);
2910 }
2911 } else {
2912 match ix {
2913 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2914 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2915 };
2916 }
2917 self.diagnostics_timestamp = lamport_timestamp;
2918 self.non_text_state_update_count += 1;
2919 self.text.lamport_clock.observe(lamport_timestamp);
2920 cx.notify();
2921 cx.emit(BufferEvent::DiagnosticsUpdated);
2922 }
2923 }
2924
2925 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2926 self.was_changed();
2927 cx.emit(BufferEvent::Operation {
2928 operation,
2929 is_local,
2930 });
2931 }
2932
2933 /// Removes the selections for a given peer.
2934 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2935 self.remote_selections.remove(&replica_id);
2936 cx.notify();
2937 }
2938
2939 /// Undoes the most recent transaction.
2940 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2941 let was_dirty = self.is_dirty();
2942 let old_version = self.version.clone();
2943
2944 if let Some((transaction_id, operation)) = self.text.undo() {
2945 self.send_operation(Operation::Buffer(operation), true, cx);
2946 self.did_edit(&old_version, was_dirty, cx);
2947 Some(transaction_id)
2948 } else {
2949 None
2950 }
2951 }
2952
2953 /// Manually undoes a specific transaction in the buffer's undo history.
2954 pub fn undo_transaction(
2955 &mut self,
2956 transaction_id: TransactionId,
2957 cx: &mut Context<Self>,
2958 ) -> bool {
2959 let was_dirty = self.is_dirty();
2960 let old_version = self.version.clone();
2961 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2962 self.send_operation(Operation::Buffer(operation), true, cx);
2963 self.did_edit(&old_version, was_dirty, cx);
2964 true
2965 } else {
2966 false
2967 }
2968 }
2969
2970 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2971 pub fn undo_to_transaction(
2972 &mut self,
2973 transaction_id: TransactionId,
2974 cx: &mut Context<Self>,
2975 ) -> bool {
2976 let was_dirty = self.is_dirty();
2977 let old_version = self.version.clone();
2978
2979 let operations = self.text.undo_to_transaction(transaction_id);
2980 let undone = !operations.is_empty();
2981 for operation in operations {
2982 self.send_operation(Operation::Buffer(operation), true, cx);
2983 }
2984 if undone {
2985 self.did_edit(&old_version, was_dirty, cx)
2986 }
2987 undone
2988 }
2989
2990 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2991 let was_dirty = self.is_dirty();
2992 let operation = self.text.undo_operations(counts);
2993 let old_version = self.version.clone();
2994 self.send_operation(Operation::Buffer(operation), true, cx);
2995 self.did_edit(&old_version, was_dirty, cx);
2996 }
2997
/// Redoes the most recent transaction.
2999 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3000 let was_dirty = self.is_dirty();
3001 let old_version = self.version.clone();
3002
3003 if let Some((transaction_id, operation)) = self.text.redo() {
3004 self.send_operation(Operation::Buffer(operation), true, cx);
3005 self.did_edit(&old_version, was_dirty, cx);
3006 Some(transaction_id)
3007 } else {
3008 None
3009 }
3010 }
3011
/// Manually redoes all changes until a given transaction in the buffer's redo history.
3013 pub fn redo_to_transaction(
3014 &mut self,
3015 transaction_id: TransactionId,
3016 cx: &mut Context<Self>,
3017 ) -> bool {
3018 let was_dirty = self.is_dirty();
3019 let old_version = self.version.clone();
3020
3021 let operations = self.text.redo_to_transaction(transaction_id);
3022 let redone = !operations.is_empty();
3023 for operation in operations {
3024 self.send_operation(Operation::Buffer(operation), true, cx);
3025 }
3026 if redone {
3027 self.did_edit(&old_version, was_dirty, cx)
3028 }
3029 redone
3030 }
3031
3032 /// Override current completion triggers with the user-provided completion triggers.
3033 pub fn set_completion_triggers(
3034 &mut self,
3035 server_id: LanguageServerId,
3036 triggers: BTreeSet<String>,
3037 cx: &mut Context<Self>,
3038 ) {
3039 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3040 if triggers.is_empty() {
3041 self.completion_triggers_per_language_server
3042 .remove(&server_id);
3043 self.completion_triggers = self
3044 .completion_triggers_per_language_server
3045 .values()
3046 .flat_map(|triggers| triggers.iter().cloned())
3047 .collect();
3048 } else {
3049 self.completion_triggers_per_language_server
3050 .insert(server_id, triggers.clone());
3051 self.completion_triggers.extend(triggers.iter().cloned());
3052 }
3053 self.send_operation(
3054 Operation::UpdateCompletionTriggers {
3055 triggers: triggers.into_iter().collect(),
3056 lamport_timestamp: self.completion_triggers_timestamp,
3057 server_id,
3058 },
3059 true,
3060 cx,
3061 );
3062 cx.notify();
3063 }
3064
3065 /// Returns a list of strings which trigger a completion menu for this language.
/// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3067 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3068 &self.completion_triggers
3069 }
3070
3071 /// Call this directly after performing edits to prevent the preview tab
3072 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3073 /// to return false until there are additional edits.
3074 pub fn refresh_preview(&mut self) {
3075 self.preview_version = self.version.clone();
3076 }
3077
3078 /// Whether we should preserve the preview status of a tab containing this buffer.
3079 pub fn preserve_preview(&self) -> bool {
3080 !self.has_edits_since(&self.preview_version)
3081 }
3082}
3083
3084#[doc(hidden)]
3085#[cfg(any(test, feature = "test-support"))]
3086impl Buffer {
3087 pub fn edit_via_marked_text(
3088 &mut self,
3089 marked_string: &str,
3090 autoindent_mode: Option<AutoindentMode>,
3091 cx: &mut Context<Self>,
3092 ) {
3093 let edits = self.edits_for_marked_text(marked_string);
3094 self.edit(edits, autoindent_mode, cx);
3095 }
3096
3097 pub fn set_group_interval(&mut self, group_interval: Duration) {
3098 self.text.set_group_interval(group_interval);
3099 }
3100
3101 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3102 where
3103 T: rand::Rng,
3104 {
3105 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3106 let mut last_end = None;
3107 for _ in 0..old_range_count {
3108 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3109 break;
3110 }
3111
3112 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3113 let mut range = self.random_byte_range(new_start, rng);
3114 if rng.random_bool(0.2) {
3115 mem::swap(&mut range.start, &mut range.end);
3116 }
3117 last_end = Some(range.end);
3118
3119 let new_text_len = rng.random_range(0..10);
3120 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3121 new_text = new_text.to_uppercase();
3122
3123 edits.push((range, new_text));
3124 }
3125 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3126 self.edit(edits, None, cx);
3127 }
3128
3129 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3130 let was_dirty = self.is_dirty();
3131 let old_version = self.version.clone();
3132
3133 let ops = self.text.randomly_undo_redo(rng);
3134 if !ops.is_empty() {
3135 for op in ops {
3136 self.send_operation(Operation::Buffer(op), true, cx);
3137 self.did_edit(&old_version, was_dirty, cx);
3138 }
3139 }
3140 }
3141}
3142
3143impl EventEmitter<BufferEvent> for Buffer {}
3144
3145impl Deref for Buffer {
3146 type Target = TextBuffer;
3147
3148 fn deref(&self) -> &Self::Target {
3149 &self.text
3150 }
3151}
3152
3153impl BufferSnapshot {
3154 /// Returns [`IndentSize`] for a given line that respects user settings and
3155 /// language preferences.
3156 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3157 indent_size_for_line(self, row)
3158 }
3159
3160 /// Returns [`IndentSize`] for a given position that respects user settings
3161 /// and language preferences.
3162 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3163 let settings = language_settings(
3164 self.language_at(position).map(|l| l.name()),
3165 self.file(),
3166 cx,
3167 );
3168 if settings.hard_tabs {
3169 IndentSize::tab()
3170 } else {
3171 IndentSize::spaces(settings.tab_size.get())
3172 }
3173 }
3174
3175 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3176 /// is passed in as `single_indent_size`.
3177 pub fn suggested_indents(
3178 &self,
3179 rows: impl Iterator<Item = u32>,
3180 single_indent_size: IndentSize,
3181 ) -> BTreeMap<u32, IndentSize> {
3182 let mut result = BTreeMap::new();
3183
3184 for row_range in contiguous_ranges(rows, 10) {
3185 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3186 Some(suggestions) => suggestions,
3187 _ => break,
3188 };
3189
3190 for (row, suggestion) in row_range.zip(suggestions) {
3191 let indent_size = if let Some(suggestion) = suggestion {
3192 result
3193 .get(&suggestion.basis_row)
3194 .copied()
3195 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3196 .with_delta(suggestion.delta, single_indent_size)
3197 } else {
3198 self.indent_size_for_line(row)
3199 };
3200
3201 result.insert(row, indent_size);
3202 }
3203 }
3204
3205 result
3206 }
3207
3208 fn suggest_autoindents(
3209 &self,
3210 row_range: Range<u32>,
3211 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3212 let config = &self.language.as_ref()?.config;
3213 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3214
3215 #[derive(Debug, Clone)]
3216 struct StartPosition {
3217 start: Point,
3218 suffix: SharedString,
3219 }
3220
3221 // Find the suggested indentation ranges based on the syntax tree.
3222 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3223 let end = Point::new(row_range.end, 0);
3224 let range = (start..end).to_offset(&self.text);
3225 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3226 Some(&grammar.indents_config.as_ref()?.query)
3227 });
3228 let indent_configs = matches
3229 .grammars()
3230 .iter()
3231 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3232 .collect::<Vec<_>>();
3233
3234 let mut indent_ranges = Vec::<Range<Point>>::new();
3235 let mut start_positions = Vec::<StartPosition>::new();
3236 let mut outdent_positions = Vec::<Point>::new();
3237 while let Some(mat) = matches.peek() {
3238 let mut start: Option<Point> = None;
3239 let mut end: Option<Point> = None;
3240
3241 let config = indent_configs[mat.grammar_index];
3242 for capture in mat.captures {
3243 if capture.index == config.indent_capture_ix {
3244 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3245 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3246 } else if Some(capture.index) == config.start_capture_ix {
3247 start = Some(Point::from_ts_point(capture.node.end_position()));
3248 } else if Some(capture.index) == config.end_capture_ix {
3249 end = Some(Point::from_ts_point(capture.node.start_position()));
3250 } else if Some(capture.index) == config.outdent_capture_ix {
3251 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3252 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3253 start_positions.push(StartPosition {
3254 start: Point::from_ts_point(capture.node.start_position()),
3255 suffix: suffix.clone(),
3256 });
3257 }
3258 }
3259
3260 matches.advance();
3261 if let Some((start, end)) = start.zip(end) {
3262 if start.row == end.row {
3263 continue;
3264 }
3265 let range = start..end;
3266 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3267 Err(ix) => indent_ranges.insert(ix, range),
3268 Ok(ix) => {
3269 let prev_range = &mut indent_ranges[ix];
3270 prev_range.end = prev_range.end.max(range.end);
3271 }
3272 }
3273 }
3274 }
3275
3276 let mut error_ranges = Vec::<Range<Point>>::new();
3277 let mut matches = self
3278 .syntax
3279 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3280 while let Some(mat) = matches.peek() {
3281 let node = mat.captures[0].node;
3282 let start = Point::from_ts_point(node.start_position());
3283 let end = Point::from_ts_point(node.end_position());
3284 let range = start..end;
3285 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3286 Ok(ix) | Err(ix) => ix,
3287 };
3288 let mut end_ix = ix;
3289 while let Some(existing_range) = error_ranges.get(end_ix) {
3290 if existing_range.end < end {
3291 end_ix += 1;
3292 } else {
3293 break;
3294 }
3295 }
3296 error_ranges.splice(ix..end_ix, [range]);
3297 matches.advance();
3298 }
3299
3300 outdent_positions.sort();
3301 for outdent_position in outdent_positions {
// Find the innermost indent range containing this outdent position,
// and set its end to the outdent position.
3304 if let Some(range_to_truncate) = indent_ranges
3305 .iter_mut()
3306 .filter(|indent_range| indent_range.contains(&outdent_position))
3307 .next_back()
3308 {
3309 range_to_truncate.end = outdent_position;
3310 }
3311 }
3312
3313 start_positions.sort_by_key(|b| b.start);
3314
// Find the suggested indentation increases and decreases based on regexes.
3316 let mut regex_outdent_map = HashMap::default();
3317 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3318 let mut start_positions_iter = start_positions.iter().peekable();
3319
3320 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3321 self.for_each_line(
3322 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3323 ..Point::new(row_range.end, 0),
3324 |row, line| {
3325 if config
3326 .decrease_indent_pattern
3327 .as_ref()
3328 .is_some_and(|regex| regex.is_match(line))
3329 {
3330 indent_change_rows.push((row, Ordering::Less));
3331 }
3332 if config
3333 .increase_indent_pattern
3334 .as_ref()
3335 .is_some_and(|regex| regex.is_match(line))
3336 {
3337 indent_change_rows.push((row + 1, Ordering::Greater));
3338 }
3339 while let Some(pos) = start_positions_iter.peek() {
3340 if pos.start.row < row {
3341 let pos = start_positions_iter.next().unwrap();
3342 last_seen_suffix
3343 .entry(pos.suffix.to_string())
3344 .or_default()
3345 .push(pos.start);
3346 } else {
3347 break;
3348 }
3349 }
3350 for rule in &config.decrease_indent_patterns {
3351 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3352 let row_start_column = self.indent_size_for_line(row).len;
3353 let basis_row = rule
3354 .valid_after
3355 .iter()
3356 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3357 .flatten()
3358 .filter(|start_point| start_point.column <= row_start_column)
3359 .max_by_key(|start_point| start_point.row);
3360 if let Some(outdent_to_row) = basis_row {
3361 regex_outdent_map.insert(row, outdent_to_row.row);
3362 }
3363 break;
3364 }
3365 }
3366 },
3367 );
3368
3369 let mut indent_changes = indent_change_rows.into_iter().peekable();
3370 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3371 prev_non_blank_row.unwrap_or(0)
3372 } else {
3373 row_range.start.saturating_sub(1)
3374 };
3375
3376 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
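// For each requested row, combine the tree-sitter indent ranges, the
// regex-driven indent changes, and any syntax error ranges into a single
// suggestion expressed relative to a basis row.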
3377 Some(row_range.map(move |row| {
3378 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3379
3380 let mut indent_from_prev_row = false;
3381 let mut outdent_from_prev_row = false;
3382 let mut outdent_to_row = u32::MAX;
3383 let mut from_regex = false;
3384
3385 while let Some((indent_row, delta)) = indent_changes.peek() {
3386 match indent_row.cmp(&row) {
3387 Ordering::Equal => match delta {
3388 Ordering::Less => {
3389 from_regex = true;
3390 outdent_from_prev_row = true
3391 }
3392 Ordering::Greater => {
3393 indent_from_prev_row = true;
3394 from_regex = true
3395 }
3396 _ => {}
3397 },
3398
3399 Ordering::Greater => break,
3400 Ordering::Less => {}
3401 }
3402
3403 indent_changes.next();
3404 }
3405
3406 for range in &indent_ranges {
3407 if range.start.row >= row {
3408 break;
3409 }
3410 if range.start.row == prev_row && range.end > row_start {
3411 indent_from_prev_row = true;
3412 }
3413 if range.end > prev_row_start && range.end <= row_start {
3414 outdent_to_row = outdent_to_row.min(range.start.row);
3415 }
3416 }
3417
3418 if let Some(basis_row) = regex_outdent_map.get(&row) {
3419 indent_from_prev_row = false;
3420 outdent_to_row = *basis_row;
3421 from_regex = true;
3422 }
3423
3424 let within_error = error_ranges
3425 .iter()
3426 .any(|e| e.start.row < row && e.end > row_start);
3427
3428 let suggestion = if outdent_to_row == prev_row
3429 || (outdent_from_prev_row && indent_from_prev_row)
3430 {
3431 Some(IndentSuggestion {
3432 basis_row: prev_row,
3433 delta: Ordering::Equal,
3434 within_error: within_error && !from_regex,
3435 })
3436 } else if indent_from_prev_row {
3437 Some(IndentSuggestion {
3438 basis_row: prev_row,
3439 delta: Ordering::Greater,
3440 within_error: within_error && !from_regex,
3441 })
3442 } else if outdent_to_row < prev_row {
3443 Some(IndentSuggestion {
3444 basis_row: outdent_to_row,
3445 delta: Ordering::Equal,
3446 within_error: within_error && !from_regex,
3447 })
3448 } else if outdent_from_prev_row {
3449 Some(IndentSuggestion {
3450 basis_row: prev_row,
3451 delta: Ordering::Less,
3452 within_error: within_error && !from_regex,
3453 })
3454 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3455 {
3456 Some(IndentSuggestion {
3457 basis_row: prev_row,
3458 delta: Ordering::Equal,
3459 within_error: within_error && !from_regex,
3460 })
3461 } else {
3462 None
3463 };
3464
3465 prev_row = row;
3466 prev_row_start = row_start;
3467 suggestion
3468 }))
3469 }
3470
3471 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3472 while row > 0 {
3473 row -= 1;
3474 if !self.is_line_blank(row) {
3475 return Some(row);
3476 }
3477 }
3478 None
3479 }
3480
3481 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3482 let captures = self.syntax.captures(range, &self.text, |grammar| {
3483 grammar
3484 .highlights_config
3485 .as_ref()
3486 .map(|config| &config.query)
3487 });
3488 let highlight_maps = captures
3489 .grammars()
3490 .iter()
3491 .map(|grammar| grammar.highlight_map())
3492 .collect();
3493 (captures, highlight_maps)
3494 }
3495
3496 /// Iterates over chunks of text in the given range of the buffer. The chunk
3497 /// boundaries are arbitrary, since the text is stored in a [`Rope`](text::Rope),
3498 /// but each returned chunk has a single syntax highlighting style and
3499 /// diagnostic status.
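///
/// A minimal usage sketch (assumes a `BufferSnapshot` named `snapshot` is in scope;
/// marked `ignore` since constructing a buffer requires more setup than shown here):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries at most one syntax highlight id and one diagnostic severity.
///     if let Some(_highlight_id) = chunk.syntax_highlight_id {
///         // A `SyntaxTheme` could be used here to resolve a concrete style.
///     }
///     print!("{}", chunk.text);
/// }
/// ```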
3500 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3501 let range = range.start.to_offset(self)..range.end.to_offset(self);
3502
3503 let mut syntax = None;
3504 if language_aware {
3505 syntax = Some(self.get_highlights(range.clone()));
3506 }
3507 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3508 let diagnostics = language_aware;
3509 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3510 }
3511
3512 pub fn highlighted_text_for_range<T: ToOffset>(
3513 &self,
3514 range: Range<T>,
3515 override_style: Option<HighlightStyle>,
3516 syntax_theme: &SyntaxTheme,
3517 ) -> HighlightedText {
3518 HighlightedText::from_buffer_range(
3519 range,
3520 &self.text,
3521 &self.syntax,
3522 override_style,
3523 syntax_theme,
3524 )
3525 }
3526
3527 /// Invokes the given callback for each line of text in the given range of the buffer.
3528 /// A callback is used to avoid allocating a new string for every line.
3529 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3530 let mut line = String::new();
3531 let mut row = range.start.row;
3532 for chunk in self
3533 .as_rope()
3534 .chunks_in_range(range.to_offset(self))
3535 .chain(["\n"])
3536 {
3537 for (newline_ix, text) in chunk.split('\n').enumerate() {
3538 if newline_ix > 0 {
3539 callback(row, &line);
3540 row += 1;
3541 line.clear();
3542 }
3543 line.push_str(text);
3544 }
3545 }
3546 }
3547
3548 /// Iterates over every [`SyntaxLayer`] in the buffer.
3549 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3550 self.syntax_layers_for_range(0..self.len(), true)
3551 }
3552
3553 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3554 let offset = position.to_offset(self);
3555 self.syntax_layers_for_range(offset..offset, false)
3556 .filter(|l| {
3557 if let Some(ranges) = l.included_sub_ranges {
3558 ranges.iter().any(|range| {
3559 let start = range.start.to_offset(self);
3560 start <= offset && {
3561 let end = range.end.to_offset(self);
3562 offset < end
3563 }
3564 })
3565 } else {
3566 l.node().start_byte() <= offset && l.node().end_byte() > offset
3567 }
3568 })
3569 .last()
3570 }
3571
3572 pub fn syntax_layers_for_range<D: ToOffset>(
3573 &self,
3574 range: Range<D>,
3575 include_hidden: bool,
3576 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3577 self.syntax
3578 .layers_for_range(range, &self.text, include_hidden)
3579 }
3580
3581 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3582 &self,
3583 range: Range<D>,
3584 ) -> Option<SyntaxLayer<'_>> {
3585 let range = range.to_offset(self);
3586 self.syntax
3587 .layers_for_range(range, &self.text, false)
3588 .max_by(|a, b| {
3589 if a.depth != b.depth {
3590 a.depth.cmp(&b.depth)
3591 } else if a.offset.0 != b.offset.0 {
3592 a.offset.0.cmp(&b.offset.0)
3593 } else {
3594 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3595 }
3596 })
3597 }
3598
3599 /// Returns the main [`Language`].
3600 pub fn language(&self) -> Option<&Arc<Language>> {
3601 self.language.as_ref()
3602 }
3603
3604 /// Returns the [`Language`] at the given location.
3605 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3606 self.syntax_layer_at(position)
3607 .map(|info| info.language)
3608 .or(self.language.as_ref())
3609 }
3610
3611 /// Returns the settings for the language at the given location.
3612 pub fn settings_at<'a, D: ToOffset>(
3613 &'a self,
3614 position: D,
3615 cx: &'a App,
3616 ) -> Cow<'a, LanguageSettings> {
3617 language_settings(
3618 self.language_at(position).map(|l| l.name()),
3619 self.file.as_ref(),
3620 cx,
3621 )
3622 }
3623
3624 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3625 CharClassifier::new(self.language_scope_at(point))
3626 }
3627
3628 /// Returns the [`LanguageScope`] at the given location.
3629 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3630 let offset = position.to_offset(self);
3631 let mut scope = None;
3632 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3633
3634 // Use the layer that has the smallest node intersecting the given point.
3635 for layer in self
3636 .syntax
3637 .layers_for_range(offset..offset, &self.text, false)
3638 {
3639 let mut cursor = layer.node().walk();
3640
3641 let mut range = None;
3642 loop {
3643 let child_range = cursor.node().byte_range();
3644 if !child_range.contains(&offset) {
3645 break;
3646 }
3647
3648 range = Some(child_range);
3649 if cursor.goto_first_child_for_byte(offset).is_none() {
3650 break;
3651 }
3652 }
3653
3654 if let Some(range) = range
3655 && smallest_range_and_depth.as_ref().is_none_or(
3656 |(smallest_range, smallest_range_depth)| {
3657 if layer.depth > *smallest_range_depth {
3658 true
3659 } else if layer.depth == *smallest_range_depth {
3660 range.len() < smallest_range.len()
3661 } else {
3662 false
3663 }
3664 },
3665 )
3666 {
3667 smallest_range_and_depth = Some((range, layer.depth));
3668 scope = Some(LanguageScope {
3669 language: layer.language.clone(),
3670 override_id: layer.override_id(offset, &self.text),
3671 });
3672 }
3673 }
3674
3675 scope.or_else(|| {
3676 self.language.clone().map(|language| LanguageScope {
3677 language,
3678 override_id: None,
3679 })
3680 })
3681 }
3682
3683 /// Returns a tuple of the range and character kind of the word
3684 /// surrounding the given position.
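///
/// A minimal usage sketch (the offset and `snapshot` are hypothetical; marked `ignore`
/// because buffer construction is omitted):
///
/// ```ignore
/// let (word_range, kind) = snapshot.surrounding_word(42, None);
/// // `kind` is the character kind shared by the word's characters, if any.
/// let word: String = snapshot.text_for_range(word_range).collect();
/// ```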
3685 pub fn surrounding_word<T: ToOffset>(
3686 &self,
3687 start: T,
3688 scope_context: Option<CharScopeContext>,
3689 ) -> (Range<usize>, Option<CharKind>) {
3690 let mut start = start.to_offset(self);
3691 let mut end = start;
3692 let mut next_chars = self.chars_at(start).take(128).peekable();
3693 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3694
3695 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3696 let word_kind = cmp::max(
3697 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3698 next_chars.peek().copied().map(|c| classifier.kind(c)),
3699 );
3700
3701 for ch in prev_chars {
3702 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3703 start -= ch.len_utf8();
3704 } else {
3705 break;
3706 }
3707 }
3708
3709 for ch in next_chars {
3710 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3711 end += ch.len_utf8();
3712 } else {
3713 break;
3714 }
3715 }
3716
3717 (start..end, word_kind)
3718 }
3719
3720 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3721 /// range. When `require_larger` is true, the node found must be larger than the query range.
3722 ///
3723 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3724 /// be moved to the root of the tree.
3725 fn goto_node_enclosing_range(
3726 cursor: &mut tree_sitter::TreeCursor,
3727 query_range: &Range<usize>,
3728 require_larger: bool,
3729 ) -> bool {
3730 let mut ascending = false;
3731 loop {
3732 let mut range = cursor.node().byte_range();
3733 if query_range.is_empty() {
3734 // When the query range is empty and the current node starts after it, move to the
3735 // previous sibling to find the containing node.
3736 if range.start > query_range.start {
3737 cursor.goto_previous_sibling();
3738 range = cursor.node().byte_range();
3739 }
3740 } else {
3741 // When the query range is non-empty and the current node ends exactly at the start,
3742 // move to the next sibling to find a node that extends beyond the start.
3743 if range.end == query_range.start {
3744 cursor.goto_next_sibling();
3745 range = cursor.node().byte_range();
3746 }
3747 }
3748
3749 let encloses = range.contains_inclusive(query_range)
3750 && (!require_larger || range.len() > query_range.len());
3751 if !encloses {
3752 ascending = true;
3753 if !cursor.goto_parent() {
3754 return false;
3755 }
3756 continue;
3757 } else if ascending {
3758 return true;
3759 }
3760
3761 // Descend into the current node.
3762 if cursor
3763 .goto_first_child_for_byte(query_range.start)
3764 .is_none()
3765 {
3766 return true;
3767 }
3768 }
3769 }
3770
3771 pub fn syntax_ancestor<'a, T: ToOffset>(
3772 &'a self,
3773 range: Range<T>,
3774 ) -> Option<tree_sitter::Node<'a>> {
3775 let range = range.start.to_offset(self)..range.end.to_offset(self);
3776 let mut result: Option<tree_sitter::Node<'a>> = None;
3777 for layer in self
3778 .syntax
3779 .layers_for_range(range.clone(), &self.text, true)
3780 {
3781 let mut cursor = layer.node().walk();
3782
3783 // Find the node that both contains the range and is larger than it.
3784 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3785 continue;
3786 }
3787
3788 let left_node = cursor.node();
3789 let mut layer_result = left_node;
3790
3791 // For an empty range, try to find another node immediately to the right of the range.
3792 if left_node.end_byte() == range.start {
3793 let mut right_node = None;
3794 while !cursor.goto_next_sibling() {
3795 if !cursor.goto_parent() {
3796 break;
3797 }
3798 }
3799
3800 while cursor.node().start_byte() == range.start {
3801 right_node = Some(cursor.node());
3802 if !cursor.goto_first_child() {
3803 break;
3804 }
3805 }
3806
3807 // If there is a candidate node on both sides of the (empty) range, then
3808 // decide between the two by favoring a named node over an anonymous token.
3809 // If both nodes are the same in that regard, favor the right one.
3810 if let Some(right_node) = right_node
3811 && (right_node.is_named() || !left_node.is_named())
3812 {
3813 layer_result = right_node;
3814 }
3815 }
3816
3817 if let Some(previous_result) = &result
3818 && previous_result.byte_range().len() < layer_result.byte_range().len()
3819 {
3820 continue;
3821 }
3822 result = Some(layer_result);
3823 }
3824
3825 result
3826 }
3827
3828 /// Find the previous sibling syntax node at the given range.
3829 ///
3830 /// This function locates the syntax node that precedes the node containing
3831 /// the given range. It searches hierarchically by:
3832 /// 1. Finding the node that contains the given range
3833 /// 2. Looking for the previous sibling at the same tree level
3834 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3835 ///
3836 /// Returns `None` if there is no previous sibling at any ancestor level.
3837 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3838 &'a self,
3839 range: Range<T>,
3840 ) -> Option<tree_sitter::Node<'a>> {
3841 let range = range.start.to_offset(self)..range.end.to_offset(self);
3842 let mut result: Option<tree_sitter::Node<'a>> = None;
3843
3844 for layer in self
3845 .syntax
3846 .layers_for_range(range.clone(), &self.text, true)
3847 {
3848 let mut cursor = layer.node().walk();
3849
3850 // Find the node that contains the range
3851 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3852 continue;
3853 }
3854
3855 // Look for the previous sibling, moving up ancestor levels if needed
3856 loop {
3857 if cursor.goto_previous_sibling() {
3858 let layer_result = cursor.node();
3859
3860 if let Some(previous_result) = &result {
3861 if previous_result.byte_range().end < layer_result.byte_range().end {
3862 continue;
3863 }
3864 }
3865 result = Some(layer_result);
3866 break;
3867 }
3868
3869 // No sibling found at this level, try moving up to parent
3870 if !cursor.goto_parent() {
3871 break;
3872 }
3873 }
3874 }
3875
3876 result
3877 }
3878
3879 /// Find the next sibling syntax node at the given range.
3880 ///
3881 /// This function locates the syntax node that follows the node containing
3882 /// the given range. It searches hierarchically by:
3883 /// 1. Finding the node that contains the given range
3884 /// 2. Looking for the next sibling at the same tree level
3885 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3886 ///
3887 /// Returns `None` if there is no next sibling at any ancestor level.
3888 pub fn syntax_next_sibling<'a, T: ToOffset>(
3889 &'a self,
3890 range: Range<T>,
3891 ) -> Option<tree_sitter::Node<'a>> {
3892 let range = range.start.to_offset(self)..range.end.to_offset(self);
3893 let mut result: Option<tree_sitter::Node<'a>> = None;
3894
3895 for layer in self
3896 .syntax
3897 .layers_for_range(range.clone(), &self.text, true)
3898 {
3899 let mut cursor = layer.node().walk();
3900
3901 // Find the node that contains the range
3902 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3903 continue;
3904 }
3905
3906 // Look for the next sibling, moving up ancestor levels if needed
3907 loop {
3908 if cursor.goto_next_sibling() {
3909 let layer_result = cursor.node();
3910
3911 if let Some(previous_result) = &result {
3912 if previous_result.byte_range().start > layer_result.byte_range().start {
3913 continue;
3914 }
3915 }
3916 result = Some(layer_result);
3917 break;
3918 }
3919
3920 // No sibling found at this level, try moving up to parent
3921 if !cursor.goto_parent() {
3922 break;
3923 }
3924 }
3925 }
3926
3927 result
3928 }
3929
3930 /// Returns the root syntax node within the given row
3931 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3932 let start_offset = position.to_offset(self);
3933
3934 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3935
3936 let layer = self
3937 .syntax
3938 .layers_for_range(start_offset..start_offset, &self.text, true)
3939 .next()?;
3940
3941 let mut cursor = layer.node().walk();
3942
3943 // Descend to the first leaf that touches the start of the range.
3944 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3945 if cursor.node().end_byte() == start_offset {
3946 cursor.goto_next_sibling();
3947 }
3948 }
3949
3950 // Ascend to the root node within the same row.
3951 while cursor.goto_parent() {
3952 if cursor.node().start_position().row != row {
3953 break;
3954 }
3955 }
3956
3957 Some(cursor.node())
3958 }
3959
3960 /// Returns the outline for the buffer.
3961 ///
3962 /// This method allows passing an optional [`SyntaxTheme`] to
3963 /// syntax-highlight the returned symbols.
3964 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3965 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3966 }
3967
3968 /// Returns all the symbols that contain the given position.
3969 ///
3970 /// This method allows passing an optional [`SyntaxTheme`] to
3971 /// syntax-highlight the returned symbols.
3972 pub fn symbols_containing<T: ToOffset>(
3973 &self,
3974 position: T,
3975 theme: Option<&SyntaxTheme>,
3976 ) -> Vec<OutlineItem<Anchor>> {
3977 let position = position.to_offset(self);
3978 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3979 let end = self.clip_offset(position + 1, Bias::Right);
3980 let mut items = self.outline_items_containing(start..end, false, theme);
3981 let mut prev_depth = None;
3982 items.retain(|item| {
3983 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3984 prev_depth = Some(item.depth);
3985 result
3986 });
3987 items
3988 }
3989
3990 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3991 let range = range.to_offset(self);
3992 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3993 grammar.outline_config.as_ref().map(|c| &c.query)
3994 });
3995 let configs = matches
3996 .grammars()
3997 .iter()
3998 .map(|g| g.outline_config.as_ref().unwrap())
3999 .collect::<Vec<_>>();
4000
4001 while let Some(mat) = matches.peek() {
4002 let config = &configs[mat.grammar_index];
4003 let containing_item_node = maybe!({
4004 let item_node = mat.captures.iter().find_map(|cap| {
4005 if cap.index == config.item_capture_ix {
4006 Some(cap.node)
4007 } else {
4008 None
4009 }
4010 })?;
4011
4012 let item_byte_range = item_node.byte_range();
4013 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4014 None
4015 } else {
4016 Some(item_node)
4017 }
4018 });
4019
4020 if let Some(item_node) = containing_item_node {
4021 return Some(
4022 Point::from_ts_point(item_node.start_position())
4023 ..Point::from_ts_point(item_node.end_position()),
4024 );
4025 }
4026
4027 matches.advance();
4028 }
4029 None
4030 }
4031
4032 pub fn outline_items_containing<T: ToOffset>(
4033 &self,
4034 range: Range<T>,
4035 include_extra_context: bool,
4036 theme: Option<&SyntaxTheme>,
4037 ) -> Vec<OutlineItem<Anchor>> {
4038 self.outline_items_containing_internal(
4039 range,
4040 include_extra_context,
4041 theme,
4042 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4043 )
4044 }
4045
4046 pub fn outline_items_as_points_containing<T: ToOffset>(
4047 &self,
4048 range: Range<T>,
4049 include_extra_context: bool,
4050 theme: Option<&SyntaxTheme>,
4051 ) -> Vec<OutlineItem<Point>> {
4052 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4053 range
4054 })
4055 }
4056
4057 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4058 &self,
4059 range: Range<T>,
4060 include_extra_context: bool,
4061 theme: Option<&SyntaxTheme>,
4062 ) -> Vec<OutlineItem<usize>> {
4063 self.outline_items_containing_internal(
4064 range,
4065 include_extra_context,
4066 theme,
4067 |buffer, range| range.to_offset(buffer),
4068 )
4069 }
4070
4071 fn outline_items_containing_internal<T: ToOffset, U>(
4072 &self,
4073 range: Range<T>,
4074 include_extra_context: bool,
4075 theme: Option<&SyntaxTheme>,
4076 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4077 ) -> Vec<OutlineItem<U>> {
4078 let range = range.to_offset(self);
4079 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4080 grammar.outline_config.as_ref().map(|c| &c.query)
4081 });
4082
4083 let mut items = Vec::new();
4084 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4085 while let Some(mat) = matches.peek() {
4086 let config = matches.grammars()[mat.grammar_index]
4087 .outline_config
4088 .as_ref()
4089 .unwrap();
4090 if let Some(item) =
4091 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4092 {
4093 items.push(item);
4094 } else if let Some(capture) = mat
4095 .captures
4096 .iter()
4097 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4098 {
4099 let capture_range = capture.node.start_position()..capture.node.end_position();
4100 let mut capture_row_range =
4101 capture_range.start.row as u32..capture_range.end.row as u32;
4102 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4103 {
4104 capture_row_range.end -= 1;
4105 }
4106 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4107 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4108 last_row_range.end = capture_row_range.end;
4109 } else {
4110 annotation_row_ranges.push(capture_row_range);
4111 }
4112 } else {
4113 annotation_row_ranges.push(capture_row_range);
4114 }
4115 }
4116 matches.advance();
4117 }
4118
4119 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4120
4121 // Assign depths based on containment relationships and convert to anchors.
4122 let mut item_ends_stack = Vec::<Point>::new();
4123 let mut anchor_items = Vec::new();
4124 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4125 for item in items {
4126 while let Some(last_end) = item_ends_stack.last().copied() {
4127 if last_end < item.range.end {
4128 item_ends_stack.pop();
4129 } else {
4130 break;
4131 }
4132 }
4133
4134 let mut annotation_row_range = None;
4135 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4136 let row_preceding_item = item.range.start.row.saturating_sub(1);
4137 if next_annotation_row_range.end < row_preceding_item {
4138 annotation_row_ranges.next();
4139 } else {
4140 if next_annotation_row_range.end == row_preceding_item {
4141 annotation_row_range = Some(next_annotation_row_range.clone());
4142 annotation_row_ranges.next();
4143 }
4144 break;
4145 }
4146 }
4147
4148 anchor_items.push(OutlineItem {
4149 depth: item_ends_stack.len(),
4150 range: range_callback(self, item.range.clone()),
4151 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4152 text: item.text,
4153 highlight_ranges: item.highlight_ranges,
4154 name_ranges: item.name_ranges,
4155 body_range: item.body_range.map(|r| range_callback(self, r)),
4156 annotation_range: annotation_row_range.map(|annotation_range| {
4157 let point_range = Point::new(annotation_range.start, 0)
4158 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4159 range_callback(self, point_range)
4160 }),
4161 });
4162 item_ends_stack.push(item.range.end);
4163 }
4164
4165 anchor_items
4166 }
4167
4168 fn next_outline_item(
4169 &self,
4170 config: &OutlineConfig,
4171 mat: &SyntaxMapMatch,
4172 range: &Range<usize>,
4173 include_extra_context: bool,
4174 theme: Option<&SyntaxTheme>,
4175 ) -> Option<OutlineItem<Point>> {
4176 let item_node = mat.captures.iter().find_map(|cap| {
4177 if cap.index == config.item_capture_ix {
4178 Some(cap.node)
4179 } else {
4180 None
4181 }
4182 })?;
4183
4184 let item_byte_range = item_node.byte_range();
4185 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4186 return None;
4187 }
4188 let item_point_range = Point::from_ts_point(item_node.start_position())
4189 ..Point::from_ts_point(item_node.end_position());
4190
4191 let mut open_point = None;
4192 let mut close_point = None;
4193
4194 let mut buffer_ranges = Vec::new();
4195 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4196 let mut range = node.start_byte()..node.end_byte();
4197 let start = node.start_position();
4198 if node.end_position().row > start.row {
4199 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4200 }
4201
4202 if !range.is_empty() {
4203 buffer_ranges.push((range, node_is_name));
4204 }
4205 };
4206
4207 for capture in mat.captures {
4208 if capture.index == config.name_capture_ix {
4209 add_to_buffer_ranges(capture.node, true);
4210 } else if Some(capture.index) == config.context_capture_ix
4211 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4212 {
4213 add_to_buffer_ranges(capture.node, false);
4214 } else {
4215 if Some(capture.index) == config.open_capture_ix {
4216 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4217 } else if Some(capture.index) == config.close_capture_ix {
4218 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4219 }
4220 }
4221 }
4222
4223 if buffer_ranges.is_empty() {
4224 return None;
4225 }
4226 let source_range_for_text =
4227 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4228
4229 let mut text = String::new();
4230 let mut highlight_ranges = Vec::new();
4231 let mut name_ranges = Vec::new();
4232 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4233 let mut last_buffer_range_end = 0;
4234 for (buffer_range, is_name) in buffer_ranges {
4235 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4236 if space_added {
4237 text.push(' ');
4238 }
4239 let before_append_len = text.len();
4240 let mut offset = buffer_range.start;
4241 chunks.seek(buffer_range.clone());
4242 for mut chunk in chunks.by_ref() {
4243 if chunk.text.len() > buffer_range.end - offset {
4244 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4245 offset = buffer_range.end;
4246 } else {
4247 offset += chunk.text.len();
4248 }
4249 let style = chunk
4250 .syntax_highlight_id
4251 .zip(theme)
4252 .and_then(|(highlight, theme)| highlight.style(theme));
4253 if let Some(style) = style {
4254 let start = text.len();
4255 let end = start + chunk.text.len();
4256 highlight_ranges.push((start..end, style));
4257 }
4258 text.push_str(chunk.text);
4259 if offset >= buffer_range.end {
4260 break;
4261 }
4262 }
4263 if is_name {
4264 let after_append_len = text.len();
4265 let start = if space_added && !name_ranges.is_empty() {
4266 before_append_len - 1
4267 } else {
4268 before_append_len
4269 };
4270 name_ranges.push(start..after_append_len);
4271 }
4272 last_buffer_range_end = buffer_range.end;
4273 }
4274
4275 Some(OutlineItem {
4276 depth: 0, // We'll calculate the depth later
4277 range: item_point_range,
4278 source_range_for_text: source_range_for_text.to_point(self),
4279 text,
4280 highlight_ranges,
4281 name_ranges,
4282 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4283 annotation_range: None,
4284 })
4285 }
4286
4287 pub fn function_body_fold_ranges<T: ToOffset>(
4288 &self,
4289 within: Range<T>,
4290 ) -> impl Iterator<Item = Range<usize>> + '_ {
4291 self.text_object_ranges(within, TreeSitterOptions::default())
4292 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4293 }
4294
4295 /// For each grammar in the language, runs the provided
4296 /// [`tree_sitter::Query`] against the given range.
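///
/// A usage sketch mirroring how the outline query is run elsewhere in this file
/// (assumes a `snapshot: BufferSnapshot`; marked `ignore` because buffer setup is omitted):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` for the grammar at `mat.grammar_index` here.
///     matches.advance();
/// }
/// ```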
4297 pub fn matches(
4298 &self,
4299 range: Range<usize>,
4300 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4301 ) -> SyntaxMapMatches<'_> {
4302 self.syntax.matches(range, self, query)
4303 }
4304
4305 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4306 /// Because whole chunks are considered, this may return more bracket pairs than the range itself contains.
4307 ///
4308 /// Chunks whose row ranges appear in `known_chunks` are skipped.
4309 /// The resulting bracket match collections are not ordered.
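///
/// A usage sketch (the offset range is hypothetical; pass `None` to recompute every
/// applicable chunk; marked `ignore` because buffer setup is omitted):
///
/// ```ignore
/// for (_row_range, brackets) in snapshot.fetch_bracket_ranges(0..256, None) {
///     for bracket in brackets {
///         // `bracket.open_range`/`bracket.close_range` may extend outside 0..256,
///         // because results cover whole row chunks.
///         let _ = (bracket.open_range, bracket.close_range);
///     }
/// }
/// ```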
4310 pub fn fetch_bracket_ranges(
4311 &self,
4312 range: Range<usize>,
4313 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4314 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4315 let mut all_bracket_matches = HashMap::default();
4316
4317 for chunk in self
4318 .tree_sitter_data
4319 .chunks
4320 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4321 {
4322 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4323 continue;
4324 }
4325 let Some(chunk_range) = self.tree_sitter_data.chunks.chunk_range(chunk) else {
4326 continue;
4327 };
4328 let chunk_range = chunk_range.to_offset(&self);
4329
4330 if let Some(cached_brackets) =
4331 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4332 {
4333 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4334 continue;
4335 }
4336
4337 let mut all_brackets = Vec::new();
4338 let mut opens = Vec::new();
4339 let mut color_pairs = Vec::new();
4340
4341 let mut matches = self
4342 .syntax
4343 .matches(chunk_range.clone(), &self.text, |grammar| {
4344 grammar.brackets_config.as_ref().map(|c| &c.query)
4345 });
4346 let configs = matches
4347 .grammars()
4348 .iter()
4349 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4350 .collect::<Vec<_>>();
4351
4352 while let Some(mat) = matches.peek() {
4353 let mut open = None;
4354 let mut close = None;
4355 let syntax_layer_depth = mat.depth;
4356 let config = configs[mat.grammar_index];
4357 let pattern = &config.patterns[mat.pattern_index];
4358 for capture in mat.captures {
4359 if capture.index == config.open_capture_ix {
4360 open = Some(capture.node.byte_range());
4361 } else if capture.index == config.close_capture_ix {
4362 close = Some(capture.node.byte_range());
4363 }
4364 }
4365
4366 matches.advance();
4367
4368 let Some((open_range, close_range)) = open.zip(close) else {
4369 continue;
4370 };
4371
4372 let bracket_range = open_range.start..=close_range.end;
4373 if !bracket_range.overlaps(&chunk_range) {
4374 continue;
4375 }
4376
4377 let index = all_brackets.len();
4378 all_brackets.push(BracketMatch {
4379 open_range: open_range.clone(),
4380 close_range: close_range.clone(),
4381 newline_only: pattern.newline_only,
4382 syntax_layer_depth,
4383 color_index: None,
4384 });
4385
4386 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4387 // a bracket pair will match the entire tag with all of the text inside it.
4388 // For now, avoid colorizing any pair where both brackets are longer than a single character.
4389 // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4390 let should_color =
4391 !pattern.rainbow_exclude && (open_range.len() == 1 || close_range.len() == 1);
4392 if should_color {
4393 opens.push(open_range.clone());
4394 color_pairs.push((open_range, close_range, index));
4395 }
4396 }
4397
4398 opens.sort_by_key(|r| (r.start, r.end));
4399 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4400 color_pairs.sort_by_key(|(_, close, _)| close.end);
4401
4402 let mut open_stack = Vec::new();
4403 let mut open_index = 0;
4404 for (open, close, index) in color_pairs {
4405 while open_index < opens.len() && opens[open_index].start < close.start {
4406 open_stack.push(opens[open_index].clone());
4407 open_index += 1;
4408 }
4409
4410 if open_stack.last() == Some(&open) {
4411 let depth_index = open_stack.len() - 1;
4412 all_brackets[index].color_index = Some(depth_index);
4413 open_stack.pop();
4414 }
4415 }
4416
4417 all_brackets.sort_by_key(|bracket_match| {
4418 (bracket_match.open_range.start, bracket_match.open_range.end)
4419 });
4420
4421 if let empty_slot @ None =
4422 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4423 {
4424 *empty_slot = Some(all_brackets.clone());
4425 }
4426 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4427 }
4428
4429 all_bracket_matches
4430 }
4431
4432 pub fn all_bracket_ranges(
4433 &self,
4434 range: Range<usize>,
4435 ) -> impl Iterator<Item = BracketMatch<usize>> {
4436 self.fetch_bracket_ranges(range.clone(), None)
4437 .into_values()
4438 .flatten()
4439 .filter(move |bracket_match| {
4440 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4441 bracket_range.overlaps(&range)
4442 })
4443 }
4444
4445 /// Returns bracket range pairs overlapping or adjacent to `range`
4446 pub fn bracket_ranges<T: ToOffset>(
4447 &self,
4448 range: Range<T>,
4449 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4450 // Find bracket pairs that *inclusively* contain the given range.
4451 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4452 self.all_bracket_ranges(range)
4453 .filter(|pair| !pair.newline_only)
4454 }
4455
4456 pub fn debug_variables_query<T: ToOffset>(
4457 &self,
4458 range: Range<T>,
4459 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4460 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4461
4462 let mut matches = self.syntax.matches_with_options(
4463 range.clone(),
4464 &self.text,
4465 TreeSitterOptions::default(),
4466 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4467 );
4468
4469 let configs = matches
4470 .grammars()
4471 .iter()
4472 .map(|grammar| grammar.debug_variables_config.as_ref())
4473 .collect::<Vec<_>>();
4474
4475 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4476
4477 iter::from_fn(move || {
4478 loop {
4479 while let Some(capture) = captures.pop() {
4480 if capture.0.overlaps(&range) {
4481 return Some(capture);
4482 }
4483 }
4484
4485 let mat = matches.peek()?;
4486
4487 let Some(config) = configs[mat.grammar_index].as_ref() else {
4488 matches.advance();
4489 continue;
4490 };
4491
4492 for capture in mat.captures {
4493 let Some(ix) = config
4494 .objects_by_capture_ix
4495 .binary_search_by_key(&capture.index, |e| e.0)
4496 .ok()
4497 else {
4498 continue;
4499 };
4500 let text_object = config.objects_by_capture_ix[ix].1;
4501 let byte_range = capture.node.byte_range();
4502
4503 let mut found = false;
4504 for (range, existing) in captures.iter_mut() {
4505 if existing == &text_object {
4506 range.start = range.start.min(byte_range.start);
4507 range.end = range.end.max(byte_range.end);
4508 found = true;
4509 break;
4510 }
4511 }
4512
4513 if !found {
4514 captures.push((byte_range, text_object));
4515 }
4516 }
4517
4518 matches.advance();
4519 }
4520 })
4521 }
4522
4523 pub fn text_object_ranges<T: ToOffset>(
4524 &self,
4525 range: Range<T>,
4526 options: TreeSitterOptions,
4527 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4528 let range =
4529 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4530
4531 let mut matches =
4532 self.syntax
4533 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4534 grammar.text_object_config.as_ref().map(|c| &c.query)
4535 });
4536
4537 let configs = matches
4538 .grammars()
4539 .iter()
4540 .map(|grammar| grammar.text_object_config.as_ref())
4541 .collect::<Vec<_>>();
4542
4543 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4544
4545 iter::from_fn(move || {
4546 loop {
4547 while let Some(capture) = captures.pop() {
4548 if capture.0.overlaps(&range) {
4549 return Some(capture);
4550 }
4551 }
4552
4553 let mat = matches.peek()?;
4554
4555 let Some(config) = configs[mat.grammar_index].as_ref() else {
4556 matches.advance();
4557 continue;
4558 };
4559
4560 for capture in mat.captures {
4561 let Some(ix) = config
4562 .text_objects_by_capture_ix
4563 .binary_search_by_key(&capture.index, |e| e.0)
4564 .ok()
4565 else {
4566 continue;
4567 };
4568 let text_object = config.text_objects_by_capture_ix[ix].1;
4569 let byte_range = capture.node.byte_range();
4570
4571 let mut found = false;
4572 for (range, existing) in captures.iter_mut() {
4573 if existing == &text_object {
4574 range.start = range.start.min(byte_range.start);
4575 range.end = range.end.max(byte_range.end);
4576 found = true;
4577 break;
4578 }
4579 }
4580
4581 if !found {
4582 captures.push((byte_range, text_object));
4583 }
4584 }
4585
4586 matches.advance();
4587 }
4588 })
4589 }
4590
4591 /// Returns enclosing bracket ranges containing the given range
4592 pub fn enclosing_bracket_ranges<T: ToOffset>(
4593 &self,
4594 range: Range<T>,
4595 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4596 let range = range.start.to_offset(self)..range.end.to_offset(self);
4597
4598 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4599 let max_depth = result
4600 .iter()
4601 .map(|mat| mat.syntax_layer_depth)
4602 .max()
4603 .unwrap_or(0);
4604 result.into_iter().filter(move |pair| {
4605 pair.open_range.start <= range.start
4606 && pair.close_range.end >= range.end
4607 && pair.syntax_layer_depth == max_depth
4608 })
4609 }
4610
4611 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4612 ///
4613 /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
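///
/// A usage sketch (the offsets and filtering policy are hypothetical; marked `ignore`
/// because buffer setup is omitted):
///
/// ```ignore
/// // Only consider pairs whose open bracket starts before offset 100.
/// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool = &|open, _close| open.start < 100;
/// if let Some((open, close)) =
///     snapshot.innermost_enclosing_bracket_ranges(100..110, Some(filter))
/// {
///     // `open` and `close` are the byte ranges of the innermost surrounding pair.
///     let _ = (open, close);
/// }
/// ```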
4614 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4615 &self,
4616 range: Range<T>,
4617 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4618 ) -> Option<(Range<usize>, Range<usize>)> {
4619 let range = range.start.to_offset(self)..range.end.to_offset(self);
4620
4621 // Get the ranges of the innermost pair of brackets.
4622 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4623
4624 for pair in self.enclosing_bracket_ranges(range) {
4625 if let Some(range_filter) = range_filter
4626 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4627 {
4628 continue;
4629 }
4630
4631 let len = pair.close_range.end - pair.open_range.start;
4632
4633 if let Some((existing_open, existing_close)) = &result {
4634 let existing_len = existing_close.end - existing_open.start;
4635 if len > existing_len {
4636 continue;
4637 }
4638 }
4639
4640 result = Some((pair.open_range, pair.close_range));
4641 }
4642
4643 result
4644 }
4645
4646 /// Returns offset ranges for any matches of the redaction query.
4647 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4648 /// will be run on the relevant section of the buffer.
4649 pub fn redacted_ranges<T: ToOffset>(
4650 &self,
4651 range: Range<T>,
4652 ) -> impl Iterator<Item = Range<usize>> + '_ {
4653 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4654 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4655 grammar
4656 .redactions_config
4657 .as_ref()
4658 .map(|config| &config.query)
4659 });
4660
4661 let configs = syntax_matches
4662 .grammars()
4663 .iter()
4664 .map(|grammar| grammar.redactions_config.as_ref())
4665 .collect::<Vec<_>>();
4666
4667 iter::from_fn(move || {
4668 let redacted_range = syntax_matches
4669 .peek()
4670 .and_then(|mat| {
4671 configs[mat.grammar_index].and_then(|config| {
4672 mat.captures
4673 .iter()
4674 .find(|capture| capture.index == config.redaction_capture_ix)
4675 })
4676 })
4677 .map(|mat| mat.node.byte_range());
4678 syntax_matches.advance();
4679 redacted_range
4680 })
4681 }
4682
4683 pub fn injections_intersecting_range<T: ToOffset>(
4684 &self,
4685 range: Range<T>,
4686 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4687 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4688
4689 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4690 grammar
4691 .injection_config
4692 .as_ref()
4693 .map(|config| &config.query)
4694 });
4695
4696 let configs = syntax_matches
4697 .grammars()
4698 .iter()
4699 .map(|grammar| grammar.injection_config.as_ref())
4700 .collect::<Vec<_>>();
4701
4702 iter::from_fn(move || {
4703 let ranges = syntax_matches.peek().and_then(|mat| {
4704 let config = &configs[mat.grammar_index]?;
4705 let content_capture_range = mat.captures.iter().find_map(|capture| {
4706 if capture.index == config.content_capture_ix {
4707 Some(capture.node.byte_range())
4708 } else {
4709 None
4710 }
4711 })?;
4712 let language = self.language_at(content_capture_range.start)?;
4713 Some((content_capture_range, language))
4714 });
4715 syntax_matches.advance();
4716 ranges
4717 })
4718 }
4719
4720 pub fn runnable_ranges(
4721 &self,
4722 offset_range: Range<usize>,
4723 ) -> impl Iterator<Item = RunnableRange> + '_ {
4724 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4725 grammar.runnable_config.as_ref().map(|config| &config.query)
4726 });
4727
4728 let test_configs = syntax_matches
4729 .grammars()
4730 .iter()
4731 .map(|grammar| grammar.runnable_config.as_ref())
4732 .collect::<Vec<_>>();
4733
4734 iter::from_fn(move || {
4735 loop {
4736 let mat = syntax_matches.peek()?;
4737
4738 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4739 let mut run_range = None;
4740 let full_range = mat.captures.iter().fold(
4741 Range {
4742 start: usize::MAX,
4743 end: 0,
4744 },
4745 |mut acc, next| {
4746 let byte_range = next.node.byte_range();
4747 if acc.start > byte_range.start {
4748 acc.start = byte_range.start;
4749 }
4750 if acc.end < byte_range.end {
4751 acc.end = byte_range.end;
4752 }
4753 acc
4754 },
4755 );
4756 if full_range.start > full_range.end {
4757 // We did not find a full spanning range of this match.
4758 return None;
4759 }
4760 let extra_captures: SmallVec<[_; 1]> =
4761 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4762 test_configs
4763 .extra_captures
4764 .get(capture.index as usize)
4765 .cloned()
4766 .and_then(|tag_name| match tag_name {
4767 RunnableCapture::Named(name) => {
4768 Some((capture.node.byte_range(), name))
4769 }
4770 RunnableCapture::Run => {
4771 let _ = run_range.insert(capture.node.byte_range());
4772 None
4773 }
4774 })
4775 }));
4776 let run_range = run_range?;
4777 let tags = test_configs
4778 .query
4779 .property_settings(mat.pattern_index)
4780 .iter()
4781 .filter_map(|property| {
4782 if *property.key == *"tag" {
4783 property
4784 .value
4785 .as_ref()
4786 .map(|value| RunnableTag(value.to_string().into()))
4787 } else {
4788 None
4789 }
4790 })
4791 .collect();
4792 let extra_captures = extra_captures
4793 .into_iter()
4794 .map(|(range, name)| {
4795 (
4796 name.to_string(),
4797 self.text_for_range(range).collect::<String>(),
4798 )
4799 })
4800 .collect();
4801 // All tags should have the same range.
4802 Some(RunnableRange {
4803 run_range,
4804 full_range,
4805 runnable: Runnable {
4806 tags,
4807 language: mat.language,
4808 buffer: self.remote_id(),
4809 },
4810 extra_captures,
4811 buffer_id: self.remote_id(),
4812 })
4813 });
4814
4815 syntax_matches.advance();
4816 if test_range.is_some() {
4817 // It's fine for us to short-circuit on `.peek()?` returning None. We don't want to return None from this
4818 // iterator just because a match did not contain a run marker; in that case we loop around to the next match.
4819 return test_range;
4820 }
4821 }
4822 })
4823 }
4824
4825 /// Returns selections for remote peers intersecting the given range.
4826 #[allow(clippy::type_complexity)]
4827 pub fn selections_in_range(
4828 &self,
4829 range: Range<Anchor>,
4830 include_local: bool,
4831 ) -> impl Iterator<
4832 Item = (
4833 ReplicaId,
4834 bool,
4835 CursorShape,
4836 impl Iterator<Item = &Selection<Anchor>> + '_,
4837 ),
4838 > + '_ {
4839 self.remote_selections
4840 .iter()
4841 .filter(move |(replica_id, set)| {
4842 (include_local || **replica_id != self.text.replica_id())
4843 && !set.selections.is_empty()
4844 })
4845 .map(move |(replica_id, set)| {
4846 let start_ix = match set.selections.binary_search_by(|probe| {
4847 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4848 }) {
4849 Ok(ix) | Err(ix) => ix,
4850 };
4851 let end_ix = match set.selections.binary_search_by(|probe| {
4852 probe.start.cmp(&range.end, self).then(Ordering::Less)
4853 }) {
4854 Ok(ix) | Err(ix) => ix,
4855 };
4856
4857 (
4858 *replica_id,
4859 set.line_mode,
4860 set.cursor_shape,
4861 set.selections[start_ix..end_ix].iter(),
4862 )
4863 })
4864 }
4865
4866 /// Returns whether the buffer contains any diagnostics.
4867 pub fn has_diagnostics(&self) -> bool {
4868 !self.diagnostics.is_empty()
4869 }
4870
4871 /// Returns all the diagnostics intersecting the given range.
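///
/// A usage sketch (resolving entries to `usize` offsets, as done elsewhere in this file;
/// marked `ignore` because buffer setup is omitted):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     // `entry.range` is expressed in the requested coordinate type (`usize` here).
///     let _severity = entry.diagnostic.severity;
/// }
/// ```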
4872 pub fn diagnostics_in_range<'a, T, O>(
4873 &'a self,
4874 search_range: Range<T>,
4875 reversed: bool,
4876 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4877 where
4878 T: 'a + Clone + ToOffset,
4879 O: 'a + FromAnchor,
4880 {
4881 let mut iterators: Vec<_> = self
4882 .diagnostics
4883 .iter()
4884 .map(|(_, collection)| {
4885 collection
4886 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4887 .peekable()
4888 })
4889 .collect();
4890
4891 std::iter::from_fn(move || {
4892 let (next_ix, _) = iterators
4893 .iter_mut()
4894 .enumerate()
4895 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4896 .min_by(|(_, a), (_, b)| {
4897 let cmp = a
4898 .range
4899 .start
4900 .cmp(&b.range.start, self)
4901 // when range is equal, sort by diagnostic severity
4902 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4903 // and stabilize order with group_id
4904 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4905 if reversed { cmp.reverse() } else { cmp }
4906 })?;
4907 iterators[next_ix]
4908 .next()
4909 .map(
4910 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4911 diagnostic,
4912 range: FromAnchor::from_anchor(&range.start, self)
4913 ..FromAnchor::from_anchor(&range.end, self),
4914 },
4915 )
4916 })
4917 }
4918
4919 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4920 /// should be used instead.
4921 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4922 &self.diagnostics
4923 }
4924
4925 /// Returns all the diagnostic groups associated with the given
4926 /// language server ID. If no language server ID is provided,
4927 /// all diagnostic groups are returned.
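///
/// A usage sketch (`None` returns groups from every language server; marked `ignore`
/// because buffer setup is omitted):
///
/// ```ignore
/// for (_server_id, group) in snapshot.diagnostic_groups(None) {
///     // `primary_ix` indexes the group's primary diagnostic entry.
///     let _primary = &group.entries[group.primary_ix];
/// }
/// ```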
4928 pub fn diagnostic_groups(
4929 &self,
4930 language_server_id: Option<LanguageServerId>,
4931 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4932 let mut groups = Vec::new();
4933
4934 if let Some(language_server_id) = language_server_id {
4935 if let Ok(ix) = self
4936 .diagnostics
4937 .binary_search_by_key(&language_server_id, |e| e.0)
4938 {
4939 self.diagnostics[ix]
4940 .1
4941 .groups(language_server_id, &mut groups, self);
4942 }
4943 } else {
4944 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4945 diagnostics.groups(*language_server_id, &mut groups, self);
4946 }
4947 }
4948
4949 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4950 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4951 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4952 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4953 });
4954
4955 groups
4956 }
4957
4958 /// Returns an iterator over the diagnostics for the given group.
4959 pub fn diagnostic_group<O>(
4960 &self,
4961 group_id: usize,
4962 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4963 where
4964 O: FromAnchor + 'static,
4965 {
4966 self.diagnostics
4967 .iter()
4968 .flat_map(move |(_, set)| set.group(group_id, self))
4969 }
4970
4971 /// An integer version number that accounts for all updates besides
4972 /// the buffer's text itself (which is versioned via a version vector).
4973 pub fn non_text_state_update_count(&self) -> usize {
4974 self.non_text_state_update_count
4975 }
4976
4977 /// An integer version that changes when the buffer's syntax changes.
4978 pub fn syntax_update_count(&self) -> usize {
4979 self.syntax.update_count()
4980 }
4981
4982 /// Returns a snapshot of underlying file.
4983 pub fn file(&self) -> Option<&Arc<dyn File>> {
4984 self.file.as_ref()
4985 }
4986
4987 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4988 if let Some(file) = self.file() {
4989 if file.path().file_name().is_none() || include_root {
4990 Some(file.full_path(cx).to_string_lossy().into_owned())
4991 } else {
4992 Some(file.path().display(file.path_style(cx)).to_string())
4993 }
4994 } else {
4995 None
4996 }
4997 }
4998
4999 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5000 let query_str = query.fuzzy_contents;
5001 if query_str.is_some_and(|query| query.is_empty()) {
5002 return BTreeMap::default();
5003 }
5004
5005 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5006 language,
5007 override_id: None,
5008 }));
5009
5010 let mut query_ix = 0;
5011 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5012 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5013
5014 let mut words = BTreeMap::default();
5015 let mut current_word_start_ix = None;
5016 let mut chunk_ix = query.range.start;
5017 for chunk in self.chunks(query.range, false) {
5018 for (i, c) in chunk.text.char_indices() {
5019 let ix = chunk_ix + i;
5020 if classifier.is_word(c) {
5021 if current_word_start_ix.is_none() {
5022 current_word_start_ix = Some(ix);
5023 }
5024
5025 if let Some(query_chars) = &query_chars
5026 && query_ix < query_len
5027 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5028 {
5029 query_ix += 1;
5030 }
5031 continue;
5032 } else if let Some(word_start) = current_word_start_ix.take()
5033 && query_ix == query_len
5034 {
5035 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5036 let mut word_text = self.text_for_range(word_start..ix).peekable();
5037 let first_char = word_text
5038 .peek()
5039 .and_then(|first_chunk| first_chunk.chars().next());
5040 // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
5041 if !query.skip_digits
5042 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5043 {
5044 words.insert(word_text.collect(), word_range);
5045 }
5046 }
5047 query_ix = 0;
5048 }
5049 chunk_ix += chunk.text.len();
5050 }
5051
5052 words
5053 }
5054}
5055
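/// Parameters for `words_in_range`.
///
/// A minimal construction sketch (the fuzzy string and range are hypothetical; marked
/// `ignore` because buffer setup is omitted):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("foo"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```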
5056pub struct WordsQuery<'a> {
5057 /// Only returns words with all chars from the fuzzy string in them.
5058 pub fuzzy_contents: Option<&'a str>,
5059 /// Skips words that start with a digit.
5060 pub skip_digits: bool,
5061 /// The buffer offset range in which to look for words.
5062 pub range: Range<usize>,
5063}
5064
5065fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5066 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5067}
5068
5069fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5070 let mut result = IndentSize::spaces(0);
5071 for c in text {
5072 let kind = match c {
5073 ' ' => IndentKind::Space,
5074 '\t' => IndentKind::Tab,
5075 _ => break,
5076 };
5077 if result.len == 0 {
5078 result.kind = kind;
5079 }
5080 result.len += 1;
5081 }
5082 result
5083}
5084
5085impl Clone for BufferSnapshot {
5086 fn clone(&self) -> Self {
5087 Self {
5088 text: self.text.clone(),
5089 syntax: self.syntax.clone(),
5090 file: self.file.clone(),
5091 remote_selections: self.remote_selections.clone(),
5092 diagnostics: self.diagnostics.clone(),
5093 language: self.language.clone(),
5094 tree_sitter_data: self.tree_sitter_data.clone(),
5095 non_text_state_update_count: self.non_text_state_update_count,
5096 }
5097 }
5098}
5099
5100impl Deref for BufferSnapshot {
5101 type Target = text::BufferSnapshot;
5102
5103 fn deref(&self) -> &Self::Target {
5104 &self.text
5105 }
5106}
5107
5108unsafe impl Send for BufferChunks<'_> {}
5109
5110impl<'a> BufferChunks<'a> {
5111 pub(crate) fn new(
5112 text: &'a Rope,
5113 range: Range<usize>,
5114 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5115 diagnostics: bool,
5116 buffer_snapshot: Option<&'a BufferSnapshot>,
5117 ) -> Self {
5118 let mut highlights = None;
5119 if let Some((captures, highlight_maps)) = syntax {
5120 highlights = Some(BufferChunkHighlights {
5121 captures,
5122 next_capture: None,
5123 stack: Default::default(),
5124 highlight_maps,
5125 })
5126 }
5127
5128 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5129 let chunks = text.chunks_in_range(range.clone());
5130
5131 let mut this = BufferChunks {
5132 range,
5133 buffer_snapshot,
5134 chunks,
5135 diagnostic_endpoints,
5136 error_depth: 0,
5137 warning_depth: 0,
5138 information_depth: 0,
5139 hint_depth: 0,
5140 unnecessary_depth: 0,
5141 underline: true,
5142 highlights,
5143 };
5144 this.initialize_diagnostic_endpoints();
5145 this
5146 }
5147
5148 /// Seeks to the given byte range in the buffer.
5149 pub fn seek(&mut self, range: Range<usize>) {
5150 let old_range = std::mem::replace(&mut self.range, range.clone());
5151 self.chunks.set_range(self.range.clone());
5152 if let Some(highlights) = self.highlights.as_mut() {
5153 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5154 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5155 highlights
5156 .stack
5157 .retain(|(end_offset, _)| *end_offset > range.start);
5158 if let Some(capture) = &highlights.next_capture
5159 && range.start >= capture.node.start_byte()
5160 {
5161 let next_capture_end = capture.node.end_byte();
5162 if range.start < next_capture_end {
5163 highlights.stack.push((
5164 next_capture_end,
5165 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5166 ));
5167 }
5168 highlights.next_capture.take();
5169 }
5170 } else if let Some(snapshot) = self.buffer_snapshot {
5171 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5172 *highlights = BufferChunkHighlights {
5173 captures,
5174 next_capture: None,
5175 stack: Default::default(),
5176 highlight_maps,
5177 };
5178 } else {
5179 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5180 // Seeking such BufferChunks is not supported.
5181 debug_assert!(
5182 false,
5183 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5184 );
5185 }
5186
5187 highlights.captures.set_byte_range(self.range.clone());
5188 self.initialize_diagnostic_endpoints();
5189 }
5190 }
5191
5192 fn initialize_diagnostic_endpoints(&mut self) {
5193 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5194 && let Some(buffer) = self.buffer_snapshot
5195 {
5196 let mut diagnostic_endpoints = Vec::new();
5197 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5198 diagnostic_endpoints.push(DiagnosticEndpoint {
5199 offset: entry.range.start,
5200 is_start: true,
5201 severity: entry.diagnostic.severity,
5202 is_unnecessary: entry.diagnostic.is_unnecessary,
5203 underline: entry.diagnostic.underline,
5204 });
5205 diagnostic_endpoints.push(DiagnosticEndpoint {
5206 offset: entry.range.end,
5207 is_start: false,
5208 severity: entry.diagnostic.severity,
5209 is_unnecessary: entry.diagnostic.is_unnecessary,
5210 underline: entry.diagnostic.underline,
5211 });
5212 }
5213 diagnostic_endpoints
5214 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5215 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5216 self.hint_depth = 0;
5217 self.error_depth = 0;
5218 self.warning_depth = 0;
5219 self.information_depth = 0;
5220 }
5221 }
5222
5223 /// The current byte offset in the buffer.
5224 pub fn offset(&self) -> usize {
5225 self.range.start
5226 }
5227
5228 pub fn range(&self) -> Range<usize> {
5229 self.range.clone()
5230 }
5231
5232 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5233 let depth = match endpoint.severity {
5234 DiagnosticSeverity::ERROR => &mut self.error_depth,
5235 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5236 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5237 DiagnosticSeverity::HINT => &mut self.hint_depth,
5238 _ => return,
5239 };
5240 if endpoint.is_start {
5241 *depth += 1;
5242 } else {
5243 *depth -= 1;
5244 }
5245
5246 if endpoint.is_unnecessary {
5247 if endpoint.is_start {
5248 self.unnecessary_depth += 1;
5249 } else {
5250 self.unnecessary_depth -= 1;
5251 }
5252 }
5253 }
5254
5255 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5256 if self.error_depth > 0 {
5257 Some(DiagnosticSeverity::ERROR)
5258 } else if self.warning_depth > 0 {
5259 Some(DiagnosticSeverity::WARNING)
5260 } else if self.information_depth > 0 {
5261 Some(DiagnosticSeverity::INFORMATION)
5262 } else if self.hint_depth > 0 {
5263 Some(DiagnosticSeverity::HINT)
5264 } else {
5265 None
5266 }
5267 }
5268
5269 fn current_code_is_unnecessary(&self) -> bool {
5270 self.unnecessary_depth > 0
5271 }
5272}
5273
5274impl<'a> Iterator for BufferChunks<'a> {
5275 type Item = Chunk<'a>;
5276
5277 fn next(&mut self) -> Option<Self::Item> {
5278 let mut next_capture_start = usize::MAX;
5279 let mut next_diagnostic_endpoint = usize::MAX;
5280
5281 if let Some(highlights) = self.highlights.as_mut() {
5282 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5283 if *parent_capture_end <= self.range.start {
5284 highlights.stack.pop();
5285 } else {
5286 break;
5287 }
5288 }
5289
5290 if highlights.next_capture.is_none() {
5291 highlights.next_capture = highlights.captures.next();
5292 }
5293
5294 while let Some(capture) = highlights.next_capture.as_ref() {
5295 if self.range.start < capture.node.start_byte() {
5296 next_capture_start = capture.node.start_byte();
5297 break;
5298 } else {
5299 let highlight_id =
5300 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5301 highlights
5302 .stack
5303 .push((capture.node.end_byte(), highlight_id));
5304 highlights.next_capture = highlights.captures.next();
5305 }
5306 }
5307 }
5308
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

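        // Emit the next chunk, which ends at the earliest of the underlying chunk
        // boundary, the next capture start, the next diagnostic endpoint, or the
        // end of the innermost enclosing capture.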
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
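            // Re-base the tab and char bitmaps so that bit 0 corresponds to the
            // first byte of the returned slice.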
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size, depending on the given direction.
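    ///
    /// For example, growing four spaces by two more spaces yields six spaces, and
    /// shrinking them by two yields two. A delta of a different [`IndentKind`] is
    /// ignored, except that growing a zero-length indent adopts the delta wholesale.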
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

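    /// The number of columns this indent occupies once tabs are expanded to
    /// `tab_size` columns; for example, two tabs with a tab size of 4 occupy 8 columns.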
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

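/// Groups successive values from `values` into contiguous half-open ranges,
/// starting a new range whenever a value is not exactly one greater than the
/// previous one or the current range has already reached `max_len`.
///
/// Illustrative usage (not run as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```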
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

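/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language's [`LanguageScope`] (and thus its configured
/// word characters) into account.
///
/// Illustrative usage (not run as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Word); // punctuation is ignored
/// ```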
5569#[derive(Default, Debug)]
5570pub struct CharClassifier {
5571 scope: Option<LanguageScope>,
5572 scope_context: Option<CharScopeContext>,
5573 ignore_punctuation: bool,
5574}
5575
5576impl CharClassifier {
5577 pub fn new(scope: Option<LanguageScope>) -> Self {
5578 Self {
5579 scope,
5580 scope_context: None,
5581 ignore_punctuation: false,
5582 }
5583 }
5584
5585 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5586 Self {
5587 scope_context,
5588 ..self
5589 }
5590 }
5591
5592 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5593 Self {
5594 ignore_punctuation,
5595 ..self
5596 }
5597 }
5598
5599 pub fn is_whitespace(&self, c: char) -> bool {
5600 self.kind(c) == CharKind::Whitespace
5601 }
5602
5603 pub fn is_word(&self, c: char) -> bool {
5604 self.kind(c) == CharKind::Word
5605 }
5606
5607 pub fn is_punctuation(&self, c: char) -> bool {
5608 self.kind(c) == CharKind::Punctuation
5609 }
5610
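    /// Classifies `c` with an explicit `ignore_punctuation` override: alphanumerics
    /// and `_` are always words, then the scope's configured word characters are
    /// consulted, then whitespace; anything else is punctuation unless
    /// `ignore_punctuation` is set, in which case it also counts as a word.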
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

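            // A whitespace-only first segment may continue trailing whitespace from
            // the previous chunk, so extend that chunk's range instead of starting anew.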
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

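        // The inner loop added one byte per segment for a trailing '\n', but the last
        // segment of a chunk is not followed by a newline, so back up by one.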
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}