1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{Global, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
28 SharedString, StyledText, Task, TaskLabel, TextStyle,
29};
30
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::{Mutex, RwLock};
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 future::Future,
45 iter::{self, Iterator, Peekable},
46 mem,
47 num::NonZeroU32,
48 ops::{Deref, Range},
49 path::PathBuf,
50 rc,
51 sync::{Arc, LazyLock},
52 time::{Duration, Instant},
53 vec,
54};
55use sum_tree::TreeMap;
56use text::operation_queue::OperationQueue;
57use text::*;
58pub use text::{
59 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
60 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
61 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
62 ToPointUtf16, Transaction, TransactionId, Unclipped,
63};
64use theme::{ActiveTheme as _, SyntaxTheme};
65#[cfg(any(test, feature = "test-support"))]
66use util::RandomCharIter;
67use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
68
69#[cfg(any(test, feature = "test-support"))]
70pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
71
72pub use lsp::DiagnosticSeverity;
73
74/// A label for the background task spawned by the buffer to compute
75/// a diff against the contents of its file.
76pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
77
/// Indicates whether a [`Buffer`] may be edited.
79#[derive(PartialEq, Clone, Copy, Debug)]
80pub enum Capability {
81 /// The buffer is a mutable replica.
82 ReadWrite,
83 /// The buffer is a read-only replica.
84 ReadOnly,
85}
86
87pub type BufferRow = u32;
88
89/// An in-memory representation of a source code file, including its text,
90/// syntax trees, git status, and diagnostics.
91pub struct Buffer {
92 text: TextBuffer,
93 branch_state: Option<BufferBranchState>,
94 /// Filesystem state, `None` when there is no path.
95 file: Option<Arc<dyn File>>,
96 /// The mtime of the file when this buffer was last loaded from
97 /// or saved to disk.
98 saved_mtime: Option<MTime>,
99 /// The version vector when this buffer was last loaded from
100 /// or saved to disk.
101 saved_version: clock::Global,
102 preview_version: clock::Global,
103 transaction_depth: usize,
104 was_dirty_before_starting_transaction: Option<bool>,
105 reload_task: Option<Task<Result<()>>>,
106 language: Option<Arc<Language>>,
107 autoindent_requests: Vec<Arc<AutoindentRequest>>,
108 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
 /// Memoized result of `has_changes_since(saved_version)`.
 /// The cell contains `(self.version, has_changes)` as of the most recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129 tree_sitter_data: Arc<RwLock<TreeSitterData>>,
130}
131
132#[derive(Debug)]
133pub struct TreeSitterData {
134 data_for_version: Global,
135 chunks: Vec<BufferChunk>,
136 brackets_by_chunks: Vec<Option<Vec<BracketMatch>>>,
137}
138
139const MAX_ROWS_IN_A_CHUNK: u32 = 50;
140
141#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
142pub struct BufferChunk {
143 id: usize,
144 pub start: BufferRow,
145 pub end: BufferRow,
146}
147
148impl TreeSitterData {
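 /// Splits the given buffer range into row chunks of at most
 /// `MAX_ROWS_IN_A_CHUNK` rows. For example, a range covering rows `0..=119`
 /// yields chunks `(id: 0, rows 0..50)`, `(id: 1, rows 50..100)`, and
 /// `(id: 2, rows 100..119)`, with the final chunk clamped to the last row.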
149 fn from_buffer_range(buffer_point_range: Range<Point>, version: Global) -> Self {
150 let buffer_row_range = buffer_point_range.start.row..=buffer_point_range.end.row;
151 let chunks = buffer_row_range
152 .clone()
153 .step_by(MAX_ROWS_IN_A_CHUNK as usize)
154 .enumerate()
155 .map(|(id, chunk_start)| {
156 let chunk_end =
157 std::cmp::min(chunk_start + MAX_ROWS_IN_A_CHUNK, *buffer_row_range.end());
158 BufferChunk {
159 id,
160 start: chunk_start,
161 end: chunk_end,
162 }
163 })
164 .collect::<Vec<_>>();
165
166 Self {
167 data_for_version: version,
168 brackets_by_chunks: vec![None; chunks.len()],
169 chunks,
170 }
171 }
172}
173
174#[derive(Copy, Clone, Debug, PartialEq, Eq)]
175pub enum ParseStatus {
176 Idle,
177 Parsing,
178}
179
180struct BufferBranchState {
181 base_buffer: Entity<Buffer>,
182 merged_operations: Vec<Lamport>,
183}
184
185/// An immutable, cheaply cloneable representation of a fixed
186/// state of a buffer.
187pub struct BufferSnapshot {
188 pub text: text::BufferSnapshot,
189 pub syntax: SyntaxSnapshot,
190 file: Option<Arc<dyn File>>,
191 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
192 remote_selections: TreeMap<ReplicaId, SelectionSet>,
193 language: Option<Arc<Language>>,
194 non_text_state_update_count: usize,
195 tree_sitter_data: Arc<RwLock<TreeSitterData>>,
196}
197
198/// The kind and amount of indentation in a particular line. For now,
199/// assumes that indentation is all the same character.
200#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
201pub struct IndentSize {
202 /// The number of bytes that comprise the indentation.
203 pub len: u32,
204 /// The kind of whitespace used for indentation.
205 pub kind: IndentKind,
206}
207
208/// A whitespace character that's used for indentation.
209#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
210pub enum IndentKind {
211 /// An ASCII space character.
212 #[default]
213 Space,
214 /// An ASCII tab character.
215 Tab,
216}
217
218/// The shape of a selection cursor.
219#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
220pub enum CursorShape {
221 /// A vertical bar
222 #[default]
223 Bar,
224 /// A block that surrounds the following character
225 Block,
226 /// An underline that runs along the following character
227 Underline,
228 /// A box drawn around the following character
229 Hollow,
230}
231
232impl From<settings::CursorShape> for CursorShape {
233 fn from(shape: settings::CursorShape) -> Self {
234 match shape {
235 settings::CursorShape::Bar => CursorShape::Bar,
236 settings::CursorShape::Block => CursorShape::Block,
237 settings::CursorShape::Underline => CursorShape::Underline,
238 settings::CursorShape::Hollow => CursorShape::Hollow,
239 }
240 }
241}
242
243#[derive(Clone, Debug)]
244struct SelectionSet {
245 line_mode: bool,
246 cursor_shape: CursorShape,
247 selections: Arc<[Selection<Anchor>]>,
248 lamport_timestamp: clock::Lamport,
249}
250
251/// A diagnostic associated with a certain range of a buffer.
252#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
253pub struct Diagnostic {
254 /// The name of the service that produced this diagnostic.
255 pub source: Option<String>,
256 /// A machine-readable code that identifies this diagnostic.
257 pub code: Option<NumberOrString>,
258 pub code_description: Option<lsp::Uri>,
259 /// Whether this diagnostic is a hint, warning, or error.
260 pub severity: DiagnosticSeverity,
261 /// The human-readable message associated with this diagnostic.
262 pub message: String,
 /// The human-readable message, in Markdown format.
264 pub markdown: Option<String>,
265 /// An id that identifies the group to which this diagnostic belongs.
266 ///
267 /// When a language server produces a diagnostic with
268 /// one or more associated diagnostics, those diagnostics are all
269 /// assigned a single group ID.
270 pub group_id: usize,
271 /// Whether this diagnostic is the primary diagnostic for its group.
272 ///
273 /// In a given group, the primary diagnostic is the top-level diagnostic
274 /// returned by the language server. The non-primary diagnostics are the
275 /// associated diagnostics.
276 pub is_primary: bool,
277 /// Whether this diagnostic is considered to originate from an analysis of
278 /// files on disk, as opposed to any unsaved buffer contents. This is a
279 /// property of a given diagnostic source, and is configured for a given
280 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
281 /// for the language server.
282 pub is_disk_based: bool,
283 /// Whether this diagnostic marks unnecessary code.
284 pub is_unnecessary: bool,
 /// Allows quick separation of diagnostic groups by their source kind.
286 pub source_kind: DiagnosticSourceKind,
 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
288 pub data: Option<Value>,
289 /// Whether to underline the corresponding text range in the editor.
290 pub underline: bool,
291}
292
293#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
294pub enum DiagnosticSourceKind {
295 Pulled,
296 Pushed,
297 Other,
298}
299
300/// An operation used to synchronize this buffer with its other replicas.
301#[derive(Clone, Debug, PartialEq)]
302pub enum Operation {
303 /// A text operation.
304 Buffer(text::Operation),
305
306 /// An update to the buffer's diagnostics.
307 UpdateDiagnostics {
308 /// The id of the language server that produced the new diagnostics.
309 server_id: LanguageServerId,
310 /// The diagnostics.
311 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
312 /// The buffer's lamport timestamp.
313 lamport_timestamp: clock::Lamport,
314 },
315
316 /// An update to the most recent selections in this buffer.
317 UpdateSelections {
318 /// The selections.
319 selections: Arc<[Selection<Anchor>]>,
320 /// The buffer's lamport timestamp.
321 lamport_timestamp: clock::Lamport,
322 /// Whether the selections are in 'line mode'.
323 line_mode: bool,
324 /// The [`CursorShape`] associated with these selections.
325 cursor_shape: CursorShape,
326 },
327
328 /// An update to the characters that should trigger autocompletion
329 /// for this buffer.
330 UpdateCompletionTriggers {
331 /// The characters that trigger autocompletion.
332 triggers: Vec<String>,
333 /// The buffer's lamport timestamp.
334 lamport_timestamp: clock::Lamport,
335 /// The language server ID.
336 server_id: LanguageServerId,
337 },
338
339 /// An update to the line ending type of this buffer.
340 UpdateLineEnding {
341 /// The line ending type.
342 line_ending: LineEnding,
343 /// The buffer's lamport timestamp.
344 lamport_timestamp: clock::Lamport,
345 },
346}
347
348/// An event that occurs in a buffer.
349#[derive(Clone, Debug, PartialEq)]
350pub enum BufferEvent {
351 /// The buffer was changed in a way that must be
352 /// propagated to its other replicas.
353 Operation {
354 operation: Operation,
355 is_local: bool,
356 },
357 /// The buffer was edited.
358 Edited,
359 /// The buffer's `dirty` bit changed.
360 DirtyChanged,
361 /// The buffer was saved.
362 Saved,
363 /// The buffer's file was changed on disk.
364 FileHandleChanged,
365 /// The buffer was reloaded.
366 Reloaded,
 /// The buffer needs to be reloaded.
368 ReloadNeeded,
369 /// The buffer's language was changed.
370 LanguageChanged,
371 /// The buffer's syntax trees were updated.
372 Reparsed,
373 /// The buffer's diagnostics were updated.
374 DiagnosticsUpdated,
375 /// The buffer gained or lost editing capabilities.
376 CapabilityChanged,
377}
378
379/// The file associated with a buffer.
380pub trait File: Send + Sync + Any {
381 /// Returns the [`LocalFile`] associated with this file, if the
382 /// file is local.
383 fn as_local(&self) -> Option<&dyn LocalFile>;
384
385 /// Returns whether this file is local.
386 fn is_local(&self) -> bool {
387 self.as_local().is_some()
388 }
389
390 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
391 /// only available in some states, such as modification time.
392 fn disk_state(&self) -> DiskState;
393
394 /// Returns the path of this file relative to the worktree's root directory.
395 fn path(&self) -> &Arc<RelPath>;
396
397 /// Returns the path of this file relative to the worktree's parent directory (this means it
398 /// includes the name of the worktree's root folder).
399 fn full_path(&self, cx: &App) -> PathBuf;
400
401 /// Returns the path style of this file.
402 fn path_style(&self, cx: &App) -> PathStyle;
403
404 /// Returns the last component of this handle's absolute path. If this handle refers to the root
405 /// of its worktree, then this method will return the name of the worktree itself.
406 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
407
408 /// Returns the id of the worktree to which this file belongs.
409 ///
410 /// This is needed for looking up project-specific settings.
411 fn worktree_id(&self, cx: &App) -> WorktreeId;
412
413 /// Converts this file into a protobuf message.
414 fn to_proto(&self, cx: &App) -> rpc::proto::File;
415
 /// Returns whether Zed considers this to be a private file.
417 fn is_private(&self) -> bool;
418}
419
420/// The file's storage status - whether it's stored (`Present`), and if so when it was last
421/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
422/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
423/// indicator for new files.
424#[derive(Copy, Clone, Debug, PartialEq)]
425pub enum DiskState {
426 /// File created in Zed that has not been saved.
427 New,
428 /// File present on the filesystem.
429 Present { mtime: MTime },
430 /// Deleted file that was previously present.
431 Deleted,
432}
433
434impl DiskState {
435 /// Returns the file's last known modification time on disk.
436 pub fn mtime(self) -> Option<MTime> {
437 match self {
438 DiskState::New => None,
439 DiskState::Present { mtime } => Some(mtime),
440 DiskState::Deleted => None,
441 }
442 }
443
444 pub fn exists(&self) -> bool {
445 match self {
446 DiskState::New => false,
447 DiskState::Present { .. } => true,
448 DiskState::Deleted => false,
449 }
450 }
451}
452
453/// The file associated with a buffer, in the case where the file is on the local disk.
454pub trait LocalFile: File {
 /// Returns the absolute path of this file.
456 fn abs_path(&self, cx: &App) -> PathBuf;
457
458 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
459 fn load(&self, cx: &App) -> Task<Result<String>>;
460
461 /// Loads the file's contents from disk.
462 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
463}
464
465/// The auto-indent behavior associated with an editing operation.
466/// For some editing operations, each affected line of text has its
467/// indentation recomputed. For other operations, the entire block
468/// of edited text is adjusted uniformly.
469#[derive(Clone, Debug)]
470pub enum AutoindentMode {
471 /// Indent each line of inserted text.
472 EachLine,
473 /// Apply the same indentation adjustment to all of the lines
474 /// in a given insertion.
475 Block {
476 /// The original indentation column of the first line of each
477 /// insertion, if it has been copied.
478 ///
479 /// Knowing this makes it possible to preserve the relative indentation
480 /// of every line in the insertion from when it was copied.
481 ///
 /// If the original indent column is `a`, and the first line of the insertion
 /// is then auto-indented to column `b`, then every other line of the
 /// insertion will have its indentation adjusted by `b - a` columns
 /// (see the example below).
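 ///
 /// A minimal illustration (hypothetical values):
 ///
 /// ```ignore
 /// AutoindentMode::Block {
 ///     // The copied block's first line originally sat at column 4.
 ///     original_indent_columns: vec![Some(4)],
 /// }
 /// // If auto-indent then places that first line at column 8, the other
 /// // lines of the insertion are shifted right by 8 - 4 = 4 columns.
 /// ```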
485 original_indent_columns: Vec<Option<u32>>,
486 },
487}
488
489#[derive(Clone)]
490struct AutoindentRequest {
491 before_edit: BufferSnapshot,
492 entries: Vec<AutoindentRequestEntry>,
493 is_block_mode: bool,
494 ignore_empty_lines: bool,
495}
496
497#[derive(Debug, Clone)]
498struct AutoindentRequestEntry {
499 /// A range of the buffer whose indentation should be adjusted.
500 range: Range<Anchor>,
501 /// Whether or not these lines should be considered brand new, for the
502 /// purpose of auto-indent. When text is not new, its indentation will
503 /// only be adjusted if the suggested indentation level has *changed*
504 /// since the edit was made.
505 first_line_is_new: bool,
506 indent_size: IndentSize,
507 original_indent_column: Option<u32>,
508}
509
510#[derive(Debug)]
511struct IndentSuggestion {
512 basis_row: u32,
513 delta: Ordering,
514 within_error: bool,
515}
516
517struct BufferChunkHighlights<'a> {
518 captures: SyntaxMapCaptures<'a>,
519 next_capture: Option<SyntaxMapCapture<'a>>,
520 stack: Vec<(usize, HighlightId)>,
521 highlight_maps: Vec<HighlightMap>,
522}
523
524/// An iterator that yields chunks of a buffer's text, along with their
525/// syntax highlights and diagnostic status.
526pub struct BufferChunks<'a> {
527 buffer_snapshot: Option<&'a BufferSnapshot>,
528 range: Range<usize>,
529 chunks: text::Chunks<'a>,
530 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
531 error_depth: usize,
532 warning_depth: usize,
533 information_depth: usize,
534 hint_depth: usize,
535 unnecessary_depth: usize,
536 underline: bool,
537 highlights: Option<BufferChunkHighlights<'a>>,
538}
539
540/// A chunk of a buffer's text, along with its syntax highlight and
541/// diagnostic status.
542#[derive(Clone, Debug, Default)]
543pub struct Chunk<'a> {
544 /// The text of the chunk.
545 pub text: &'a str,
546 /// The syntax highlighting style of the chunk.
547 pub syntax_highlight_id: Option<HighlightId>,
548 /// The highlight style that has been applied to this chunk in
549 /// the editor.
550 pub highlight_style: Option<HighlightStyle>,
551 /// The severity of diagnostic associated with this chunk, if any.
552 pub diagnostic_severity: Option<DiagnosticSeverity>,
553 /// A bitset of which characters are tabs in this string.
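 ///
 /// For example, in a chunk whose text is `"\tfoo\tbar"`, bits 0 and 4 are
 /// set (`0b1_0001`), since the characters at indices 0 and 4 are tabs.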
554 pub tabs: u128,
 /// A bitmap of character indices in this chunk.
556 pub chars: u128,
557 /// Whether this chunk of text is marked as unnecessary.
558 pub is_unnecessary: bool,
559 /// Whether this chunk of text was originally a tab character.
560 pub is_tab: bool,
561 /// Whether this chunk of text was originally an inlay.
562 pub is_inlay: bool,
563 /// Whether to underline the corresponding text range in the editor.
564 pub underline: bool,
565}
566
567/// A set of edits to a given version of a buffer, computed asynchronously.
568#[derive(Debug)]
569pub struct Diff {
570 pub base_version: clock::Global,
571 pub line_ending: LineEnding,
572 pub edits: Vec<(Range<usize>, Arc<str>)>,
573}
574
575#[derive(Debug, Clone, Copy)]
576pub(crate) struct DiagnosticEndpoint {
577 offset: usize,
578 is_start: bool,
579 underline: bool,
580 severity: DiagnosticSeverity,
581 is_unnecessary: bool,
582}
583
584/// A class of characters, used for characterizing a run of text.
585#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
586pub enum CharKind {
587 /// Whitespace.
588 Whitespace,
589 /// Punctuation.
590 Punctuation,
591 /// Word.
592 Word,
593}
594
595/// Context for character classification within a specific scope.
596#[derive(Copy, Clone, Eq, PartialEq, Debug)]
597pub enum CharScopeContext {
598 /// Character classification for completion queries.
599 ///
600 /// This context treats certain characters as word constituents that would
601 /// normally be considered punctuation, such as '-' in Tailwind classes
602 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
603 Completion,
604 /// Character classification for linked edits.
605 ///
606 /// This context handles characters that should be treated as part of
607 /// identifiers during linked editing operations, such as '.' in JSX
608 /// component names like `<Animated.View>`.
609 LinkedEdit,
610}
611
/// A runnable is a set of data about a buffer region that can be resolved into a task.
613pub struct Runnable {
614 pub tags: SmallVec<[RunnableTag; 1]>,
615 pub language: Arc<Language>,
616 pub buffer: BufferId,
617}
618
619#[derive(Default, Clone, Debug)]
620pub struct HighlightedText {
621 pub text: SharedString,
622 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
623}
624
625#[derive(Default, Debug)]
626struct HighlightedTextBuilder {
627 pub text: String,
628 highlights: Vec<(Range<usize>, HighlightStyle)>,
629}
630
631impl HighlightedText {
632 pub fn from_buffer_range<T: ToOffset>(
633 range: Range<T>,
634 snapshot: &text::BufferSnapshot,
635 syntax_snapshot: &SyntaxSnapshot,
636 override_style: Option<HighlightStyle>,
637 syntax_theme: &SyntaxTheme,
638 ) -> Self {
639 let mut highlighted_text = HighlightedTextBuilder::default();
640 highlighted_text.add_text_from_buffer_range(
641 range,
642 snapshot,
643 syntax_snapshot,
644 override_style,
645 syntax_theme,
646 );
647 highlighted_text.build()
648 }
649
650 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
651 gpui::StyledText::new(self.text.clone())
652 .with_default_highlights(default_style, self.highlights.iter().cloned())
653 }
654
 /// Returns the first line, with leading whitespace trimmed unless a highlight
 /// starts within it, and a boolean indicating whether more lines follow.
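 ///
 /// A minimal sketch of the expected behavior (not a runnable doctest):
 ///
 /// ```ignore
 /// let highlighted = HighlightedText {
 ///     text: "    let x = 1;\nlet y = 2;".into(),
 ///     highlights: Vec::new(),
 /// };
 /// let (preview, has_more_lines) = highlighted.first_line_preview();
 /// assert_eq!(&*preview.text, "let x = 1;");
 /// assert!(has_more_lines);
 /// ```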
657 pub fn first_line_preview(self) -> (Self, bool) {
658 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
659 let first_line = &self.text[..newline_ix];
660
661 // Trim leading whitespace, unless an edit starts prior to it.
662 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
663 if let Some((first_highlight_range, _)) = self.highlights.first() {
664 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
665 }
666
667 let preview_text = &first_line[preview_start_ix..];
668 let preview_highlights = self
669 .highlights
670 .into_iter()
671 .skip_while(|(range, _)| range.end <= preview_start_ix)
672 .take_while(|(range, _)| range.start < newline_ix)
673 .filter_map(|(mut range, highlight)| {
674 range.start = range.start.saturating_sub(preview_start_ix);
675 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
676 if range.is_empty() {
677 None
678 } else {
679 Some((range, highlight))
680 }
681 });
682
683 let preview = Self {
684 text: SharedString::new(preview_text),
685 highlights: preview_highlights.collect(),
686 };
687
688 (preview, self.text.len() > newline_ix)
689 }
690}
691
692impl HighlightedTextBuilder {
693 pub fn build(self) -> HighlightedText {
694 HighlightedText {
695 text: self.text.into(),
696 highlights: self.highlights,
697 }
698 }
699
700 pub fn add_text_from_buffer_range<T: ToOffset>(
701 &mut self,
702 range: Range<T>,
703 snapshot: &text::BufferSnapshot,
704 syntax_snapshot: &SyntaxSnapshot,
705 override_style: Option<HighlightStyle>,
706 syntax_theme: &SyntaxTheme,
707 ) {
708 let range = range.to_offset(snapshot);
709 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
710 let start = self.text.len();
711 self.text.push_str(chunk.text);
712 let end = self.text.len();
713
714 if let Some(highlight_style) = chunk
715 .syntax_highlight_id
716 .and_then(|id| id.style(syntax_theme))
717 {
718 let highlight_style = override_style.map_or(highlight_style, |override_style| {
719 highlight_style.highlight(override_style)
720 });
721 self.highlights.push((start..end, highlight_style));
722 } else if let Some(override_style) = override_style {
723 self.highlights.push((start..end, override_style));
724 }
725 }
726 }
727
728 fn highlighted_chunks<'a>(
729 range: Range<usize>,
730 snapshot: &'a text::BufferSnapshot,
731 syntax_snapshot: &'a SyntaxSnapshot,
732 ) -> BufferChunks<'a> {
733 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
734 grammar
735 .highlights_config
736 .as_ref()
737 .map(|config| &config.query)
738 });
739
740 let highlight_maps = captures
741 .grammars()
742 .iter()
743 .map(|grammar| grammar.highlight_map())
744 .collect();
745
746 BufferChunks::new(
747 snapshot.as_rope(),
748 range,
749 Some((captures, highlight_maps)),
750 false,
751 None,
752 )
753 }
754}
755
756#[derive(Clone)]
757pub struct EditPreview {
758 old_snapshot: text::BufferSnapshot,
759 applied_edits_snapshot: text::BufferSnapshot,
760 syntax_snapshot: SyntaxSnapshot,
761}
762
763impl EditPreview {
764 pub fn highlight_edits(
765 &self,
766 current_snapshot: &BufferSnapshot,
767 edits: &[(Range<Anchor>, String)],
768 include_deletions: bool,
769 cx: &App,
770 ) -> HighlightedText {
771 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
772 return HighlightedText::default();
773 };
774
775 let mut highlighted_text = HighlightedTextBuilder::default();
776
777 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
778
779 let insertion_highlight_style = HighlightStyle {
780 background_color: Some(cx.theme().status().created_background),
781 ..Default::default()
782 };
783 let deletion_highlight_style = HighlightStyle {
784 background_color: Some(cx.theme().status().deleted_background),
785 ..Default::default()
786 };
787 let syntax_theme = cx.theme().syntax();
788
789 for (range, edit_text) in edits {
790 let edit_new_end_in_preview_snapshot = range
791 .end
792 .bias_right(&self.old_snapshot)
793 .to_offset(&self.applied_edits_snapshot);
794 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
795
796 let unchanged_range_in_preview_snapshot =
797 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
798 if !unchanged_range_in_preview_snapshot.is_empty() {
799 highlighted_text.add_text_from_buffer_range(
800 unchanged_range_in_preview_snapshot,
801 &self.applied_edits_snapshot,
802 &self.syntax_snapshot,
803 None,
804 syntax_theme,
805 );
806 }
807
808 let range_in_current_snapshot = range.to_offset(current_snapshot);
809 if include_deletions && !range_in_current_snapshot.is_empty() {
810 highlighted_text.add_text_from_buffer_range(
811 range_in_current_snapshot,
 &current_snapshot.text,
 &current_snapshot.syntax,
814 Some(deletion_highlight_style),
815 syntax_theme,
816 );
817 }
818
819 if !edit_text.is_empty() {
820 highlighted_text.add_text_from_buffer_range(
821 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
822 &self.applied_edits_snapshot,
823 &self.syntax_snapshot,
824 Some(insertion_highlight_style),
825 syntax_theme,
826 );
827 }
828
829 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
830 }
831
832 highlighted_text.add_text_from_buffer_range(
833 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
834 &self.applied_edits_snapshot,
835 &self.syntax_snapshot,
836 None,
837 syntax_theme,
838 );
839
840 highlighted_text.build()
841 }
842
843 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
844 let (first, _) = edits.first()?;
845 let (last, _) = edits.last()?;
846
847 let start = first
848 .start
849 .bias_left(&self.old_snapshot)
850 .to_point(&self.applied_edits_snapshot);
851 let end = last
852 .end
853 .bias_right(&self.old_snapshot)
854 .to_point(&self.applied_edits_snapshot);
855
856 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
857 let range = Point::new(start.row, 0)
858 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
859
860 Some(range.to_offset(&self.applied_edits_snapshot))
861 }
862}
863
864#[derive(Clone, Debug, PartialEq, Eq)]
865pub struct BracketMatch {
866 pub open_range: Range<usize>,
867 pub close_range: Range<usize>,
868 pub newline_only: bool,
869 pub depth: usize,
870}
871
872impl BracketMatch {
873 pub fn bracket_ranges(self) -> (Range<usize>, Range<usize>) {
874 (self.open_range, self.close_range)
875 }
876}
877
878impl Buffer {
879 /// Create a new buffer with the given base text.
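 ///
 /// A minimal usage sketch (assuming a gpui context is available as `cx`):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
 /// ```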
880 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
881 Self::build(
882 TextBuffer::new(
883 ReplicaId::LOCAL,
884 cx.entity_id().as_non_zero_u64().into(),
885 base_text.into(),
886 &cx.background_executor(),
887 ),
888 None,
889 Capability::ReadWrite,
890 )
891 }
892
893 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
894 pub fn local_normalized(
895 base_text_normalized: Rope,
896 line_ending: LineEnding,
897 cx: &Context<Self>,
898 ) -> Self {
899 Self::build(
900 TextBuffer::new_normalized(
901 ReplicaId::LOCAL,
902 cx.entity_id().as_non_zero_u64().into(),
903 line_ending,
904 base_text_normalized,
905 ),
906 None,
907 Capability::ReadWrite,
908 )
909 }
910
911 /// Create a new buffer that is a replica of a remote buffer.
912 pub fn remote(
913 remote_id: BufferId,
914 replica_id: ReplicaId,
915 capability: Capability,
916 base_text: impl Into<String>,
917 cx: &BackgroundExecutor,
918 ) -> Self {
919 Self::build(
920 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
921 None,
922 capability,
923 )
924 }
925
926 /// Create a new buffer that is a replica of a remote buffer, populating its
927 /// state from the given protobuf message.
928 pub fn from_proto(
929 replica_id: ReplicaId,
930 capability: Capability,
931 message: proto::BufferState,
932 file: Option<Arc<dyn File>>,
933 cx: &BackgroundExecutor,
934 ) -> Result<Self> {
935 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
936 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
937 let mut this = Self::build(buffer, file, capability);
938 this.text.set_line_ending(proto::deserialize_line_ending(
939 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
940 ));
941 this.saved_version = proto::deserialize_version(&message.saved_version);
942 this.saved_mtime = message.saved_mtime.map(|time| time.into());
943 Ok(this)
944 }
945
946 /// Serialize the buffer's state to a protobuf message.
947 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
948 proto::BufferState {
949 id: self.remote_id().into(),
950 file: self.file.as_ref().map(|f| f.to_proto(cx)),
951 base_text: self.base_text().to_string(),
952 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
953 saved_version: proto::serialize_version(&self.saved_version),
954 saved_mtime: self.saved_mtime.map(|time| time.into()),
955 }
956 }
957
958 /// Serialize as protobufs all of the changes to the buffer since the given version.
959 pub fn serialize_ops(
960 &self,
961 since: Option<clock::Global>,
962 cx: &App,
963 ) -> Task<Vec<proto::Operation>> {
964 let mut operations = Vec::new();
965 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
966
967 operations.extend(self.remote_selections.iter().map(|(_, set)| {
968 proto::serialize_operation(&Operation::UpdateSelections {
969 selections: set.selections.clone(),
970 lamport_timestamp: set.lamport_timestamp,
971 line_mode: set.line_mode,
972 cursor_shape: set.cursor_shape,
973 })
974 }));
975
976 for (server_id, diagnostics) in &self.diagnostics {
977 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
978 lamport_timestamp: self.diagnostics_timestamp,
979 server_id: *server_id,
980 diagnostics: diagnostics.iter().cloned().collect(),
981 }));
982 }
983
984 for (server_id, completions) in &self.completion_triggers_per_language_server {
985 operations.push(proto::serialize_operation(
986 &Operation::UpdateCompletionTriggers {
987 triggers: completions.iter().cloned().collect(),
988 lamport_timestamp: self.completion_triggers_timestamp,
989 server_id: *server_id,
990 },
991 ));
992 }
993
994 let text_operations = self.text.operations().clone();
995 cx.background_spawn(async move {
996 let since = since.unwrap_or_default();
997 operations.extend(
998 text_operations
999 .iter()
1000 .filter(|(_, op)| !since.observed(op.timestamp()))
1001 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1002 );
1003 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1004 operations
1005 })
1006 }
1007
1008 /// Assign a language to the buffer, returning the buffer.
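 ///
 /// A sketch of chaining this during buffer construction (assuming a language
 /// `rust_language` and a gpui context `cx`):
 ///
 /// ```ignore
 /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
 /// ```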
1009 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1010 self.set_language(Some(language), cx);
1011 self
1012 }
1013
1014 /// Returns the [`Capability`] of this buffer.
1015 pub fn capability(&self) -> Capability {
1016 self.capability
1017 }
1018
1019 /// Whether this buffer can only be read.
1020 pub fn read_only(&self) -> bool {
1021 self.capability == Capability::ReadOnly
1022 }
1023
 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1025 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1026 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1027 let snapshot = buffer.snapshot();
1028 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1029 let tree_sitter_data = TreeSitterData::from_buffer_range(
1030 (0..buffer.len()).to_point(&buffer),
1031 buffer.version(),
1032 );
1033 Self {
1034 saved_mtime,
1035 tree_sitter_data: Arc::new(RwLock::new(tree_sitter_data)),
1036 saved_version: buffer.version(),
1037 preview_version: buffer.version(),
1038 reload_task: None,
1039 transaction_depth: 0,
1040 was_dirty_before_starting_transaction: None,
1041 has_unsaved_edits: Cell::new((buffer.version(), false)),
1042 text: buffer,
1043 branch_state: None,
1044 file,
1045 capability,
1046 syntax_map,
1047 reparse: None,
1048 non_text_state_update_count: 0,
1049 sync_parse_timeout: Duration::from_millis(1),
1050 parse_status: watch::channel(ParseStatus::Idle),
1051 autoindent_requests: Default::default(),
1052 wait_for_autoindent_txs: Default::default(),
1053 pending_autoindent: Default::default(),
1054 language: None,
1055 remote_selections: Default::default(),
1056 diagnostics: Default::default(),
1057 diagnostics_timestamp: Lamport::MIN,
1058 completion_triggers: Default::default(),
1059 completion_triggers_per_language_server: Default::default(),
1060 completion_triggers_timestamp: Lamport::MIN,
1061 deferred_ops: OperationQueue::new(),
1062 has_conflict: false,
1063 change_bits: Default::default(),
1064 _subscriptions: Vec::new(),
1065 }
1066 }
1067
1068 pub fn build_snapshot(
1069 text: Rope,
1070 language: Option<Arc<Language>>,
1071 language_registry: Option<Arc<LanguageRegistry>>,
1072 cx: &mut App,
1073 ) -> impl Future<Output = BufferSnapshot> + use<> {
1074 let entity_id = cx.reserve_entity::<Self>().entity_id();
1075 let buffer_id = entity_id.as_non_zero_u64().into();
1076 async move {
1077 let text =
1078 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1079 .snapshot();
1080 let mut syntax = SyntaxMap::new(&text).snapshot();
1081 if let Some(language) = language.clone() {
1082 let language_registry = language_registry.clone();
1083 syntax.reparse(&text, language_registry, language);
1084 }
1085 let tree_sitter_data = TreeSitterData::from_buffer_range(
1086 (0..text.len()).to_point(&text),
1087 text.version().clone(),
1088 );
1089 BufferSnapshot {
1090 text,
1091 syntax,
1092 file: None,
1093 diagnostics: Default::default(),
1094 remote_selections: Default::default(),
1095 tree_sitter_data: Arc::new(RwLock::new(tree_sitter_data)),
1096 language,
1097 non_text_state_update_count: 0,
1098 }
1099 }
1100 }
1101
1102 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1103 let entity_id = cx.reserve_entity::<Self>().entity_id();
1104 let buffer_id = entity_id.as_non_zero_u64().into();
1105 let text = TextBuffer::new_normalized(
1106 ReplicaId::LOCAL,
1107 buffer_id,
1108 Default::default(),
1109 Rope::new(),
1110 )
1111 .snapshot();
1112 let syntax = SyntaxMap::new(&text).snapshot();
1113 let tree_sitter_data = TreeSitterData::from_buffer_range(
1114 (0..text.len()).to_point(&text),
1115 text.version().clone(),
1116 );
1117 BufferSnapshot {
1118 text,
1119 syntax,
1120 tree_sitter_data: Arc::new(RwLock::new(tree_sitter_data)),
1121 file: None,
1122 diagnostics: Default::default(),
1123 remote_selections: Default::default(),
1124 language: None,
1125 non_text_state_update_count: 0,
1126 }
1127 }
1128
1129 #[cfg(any(test, feature = "test-support"))]
1130 pub fn build_snapshot_sync(
1131 text: Rope,
1132 language: Option<Arc<Language>>,
1133 language_registry: Option<Arc<LanguageRegistry>>,
1134 cx: &mut App,
1135 ) -> BufferSnapshot {
1136 let entity_id = cx.reserve_entity::<Self>().entity_id();
1137 let buffer_id = entity_id.as_non_zero_u64().into();
1138 let text =
1139 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1140 .snapshot();
1141 let mut syntax = SyntaxMap::new(&text).snapshot();
1142 if let Some(language) = language.clone() {
1143 syntax.reparse(&text, language_registry, language);
1144 }
1145 let tree_sitter_data = TreeSitterData::from_buffer_range(
1146 (0..text.len()).to_point(&text),
1147 text.version().clone(),
1148 );
1149 BufferSnapshot {
1150 text,
1151 syntax,
1152 tree_sitter_data: Arc::new(RwLock::new(tree_sitter_data)),
1153 file: None,
1154 diagnostics: Default::default(),
1155 remote_selections: Default::default(),
1156 language,
1157 non_text_state_update_count: 0,
1158 }
1159 }
1160
1161 /// Retrieve a snapshot of the buffer's current state. This is computationally
1162 /// cheap, and allows reading from the buffer on a background thread.
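 ///
 /// A sketch of reading from a snapshot off the main thread (assuming an
 /// `Entity<Buffer>` named `buffer` and a gpui context `cx`):
 ///
 /// ```ignore
 /// let snapshot = buffer.read(cx).snapshot();
 /// cx.background_spawn(async move {
 ///     let last_row = snapshot.max_point().row;
 ///     // ... read-only work on `snapshot` ...
 /// })
 /// .detach();
 /// ```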
1163 pub fn snapshot(&self) -> BufferSnapshot {
1164 let text = self.text.snapshot();
1165 let mut syntax_map = self.syntax_map.lock();
1166 syntax_map.interpolate(&text);
1167 let syntax = syntax_map.snapshot();
1168
1169 BufferSnapshot {
1170 text,
1171 syntax,
1172 tree_sitter_data: self.tree_sitter_data.clone(),
1173 file: self.file.clone(),
1174 remote_selections: self.remote_selections.clone(),
1175 diagnostics: self.diagnostics.clone(),
1176 language: self.language.clone(),
1177 non_text_state_update_count: self.non_text_state_update_count,
1178 }
1179 }
1180
1181 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1182 let this = cx.entity();
1183 cx.new(|cx| {
1184 let mut branch = Self {
1185 branch_state: Some(BufferBranchState {
1186 base_buffer: this.clone(),
1187 merged_operations: Default::default(),
1188 }),
1189 language: self.language.clone(),
1190 has_conflict: self.has_conflict,
1191 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1192 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1193 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1194 };
1195 if let Some(language_registry) = self.language_registry() {
1196 branch.set_language_registry(language_registry);
1197 }
1198
1199 // Reparse the branch buffer so that we get syntax highlighting immediately.
1200 branch.reparse(cx);
1201
1202 branch
1203 })
1204 }
1205
1206 pub fn preview_edits(
1207 &self,
1208 edits: Arc<[(Range<Anchor>, String)]>,
1209 cx: &App,
1210 ) -> Task<EditPreview> {
1211 let registry = self.language_registry();
1212 let language = self.language().cloned();
1213 let old_snapshot = self.text.snapshot();
1214 let mut branch_buffer = self.text.branch();
1215 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1216 let executor = cx.background_executor().clone();
1217 cx.background_spawn(async move {
1218 if !edits.is_empty() {
1219 if let Some(language) = language.clone() {
1220 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1221 }
1222
1223 branch_buffer.edit(edits.iter().cloned(), &executor);
1224 let snapshot = branch_buffer.snapshot();
1225 syntax_snapshot.interpolate(&snapshot);
1226
1227 if let Some(language) = language {
1228 syntax_snapshot.reparse(&snapshot, registry, language);
1229 }
1230 }
1231 EditPreview {
1232 old_snapshot,
1233 applied_edits_snapshot: branch_buffer.snapshot(),
1234 syntax_snapshot,
1235 }
1236 })
1237 }
1238
1239 /// Applies all of the changes in this buffer that intersect any of the
1240 /// given `ranges` to its base buffer.
1241 ///
1242 /// If `ranges` is empty, then all changes will be applied. This buffer must
1243 /// be a branch buffer to call this method.
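 ///
 /// A sketch of merging a branch back into its base (assuming an
 /// `Entity<Buffer>` named `buffer` and a gpui context `cx`):
 ///
 /// ```ignore
 /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
 /// branch.update(cx, |branch, cx| {
 ///     branch.edit([(0..0, "// header\n")], None, cx);
 ///     // Apply every branch edit back to the base buffer.
 ///     branch.merge_into_base(Vec::new(), cx);
 /// });
 /// ```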
1244 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1245 let Some(base_buffer) = self.base_buffer() else {
1246 debug_panic!("not a branch buffer");
1247 return;
1248 };
1249
1250 let mut ranges = if ranges.is_empty() {
1251 &[0..usize::MAX]
1252 } else {
1253 ranges.as_slice()
1254 }
1255 .iter()
1256 .peekable();
1257
1258 let mut edits = Vec::new();
1259 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1260 let mut is_included = false;
1261 while let Some(range) = ranges.peek() {
1262 if range.end < edit.new.start {
1263 ranges.next().unwrap();
1264 } else {
1265 if range.start <= edit.new.end {
1266 is_included = true;
1267 }
1268 break;
1269 }
1270 }
1271
1272 if is_included {
1273 edits.push((
1274 edit.old.clone(),
1275 self.text_for_range(edit.new.clone()).collect::<String>(),
1276 ));
1277 }
1278 }
1279
1280 let operation = base_buffer.update(cx, |base_buffer, cx| {
1281 // cx.emit(BufferEvent::DiffBaseChanged);
1282 base_buffer.edit(edits, None, cx)
1283 });
1284
1285 if let Some(operation) = operation
1286 && let Some(BufferBranchState {
1287 merged_operations, ..
1288 }) = &mut self.branch_state
1289 {
1290 merged_operations.push(operation);
1291 }
1292 }
1293
1294 fn on_base_buffer_event(
1295 &mut self,
1296 _: Entity<Buffer>,
1297 event: &BufferEvent,
1298 cx: &mut Context<Self>,
1299 ) {
1300 let BufferEvent::Operation { operation, .. } = event else {
1301 return;
1302 };
1303 let Some(BufferBranchState {
1304 merged_operations, ..
1305 }) = &mut self.branch_state
1306 else {
1307 return;
1308 };
1309
1310 let mut operation_to_undo = None;
1311 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1312 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1313 {
1314 merged_operations.remove(ix);
1315 operation_to_undo = Some(operation.timestamp);
1316 }
1317
1318 self.apply_ops([operation.clone()], cx);
1319
1320 if let Some(timestamp) = operation_to_undo {
1321 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1322 self.undo_operations(counts, cx);
1323 }
1324 }
1325
1326 #[cfg(test)]
1327 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1328 &self.text
1329 }
1330
1331 /// Retrieve a snapshot of the buffer's raw text, without any
1332 /// language-related state like the syntax tree or diagnostics.
1333 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1334 self.text.snapshot()
1335 }
1336
1337 /// The file associated with the buffer, if any.
1338 pub fn file(&self) -> Option<&Arc<dyn File>> {
1339 self.file.as_ref()
1340 }
1341
1342 /// The version of the buffer that was last saved or reloaded from disk.
1343 pub fn saved_version(&self) -> &clock::Global {
1344 &self.saved_version
1345 }
1346
1347 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1348 pub fn saved_mtime(&self) -> Option<MTime> {
1349 self.saved_mtime
1350 }
1351
1352 /// Assign a language to the buffer.
1353 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1354 self.non_text_state_update_count += 1;
1355 self.syntax_map.lock().clear(&self.text);
1356 self.language = language;
1357 self.was_changed();
1358 self.reparse(cx);
1359 cx.emit(BufferEvent::LanguageChanged);
1360 }
1361
1362 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1363 /// other languages if parts of the buffer are written in different languages.
1364 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1365 self.syntax_map
1366 .lock()
1367 .set_language_registry(language_registry);
1368 }
1369
1370 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1371 self.syntax_map.lock().language_registry()
1372 }
1373
1374 /// Assign the line ending type to the buffer.
1375 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1376 self.text.set_line_ending(line_ending);
1377
1378 let lamport_timestamp = self.text.lamport_clock.tick();
1379 self.send_operation(
1380 Operation::UpdateLineEnding {
1381 line_ending,
1382 lamport_timestamp,
1383 },
1384 true,
1385 cx,
1386 );
1387 }
1388
1389 /// Assign the buffer a new [`Capability`].
1390 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1391 if self.capability != capability {
1392 self.capability = capability;
1393 cx.emit(BufferEvent::CapabilityChanged)
1394 }
1395 }
1396
1397 /// This method is called to signal that the buffer has been saved.
1398 pub fn did_save(
1399 &mut self,
1400 version: clock::Global,
1401 mtime: Option<MTime>,
1402 cx: &mut Context<Self>,
1403 ) {
1404 self.saved_version = version.clone();
1405 self.has_unsaved_edits.set((version, false));
1406 self.has_conflict = false;
1407 self.saved_mtime = mtime;
1408 self.was_changed();
1409 cx.emit(BufferEvent::Saved);
1410 cx.notify();
1411 }
1412
1413 /// Reloads the contents of the buffer from disk.
1414 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1415 let (tx, rx) = futures::channel::oneshot::channel();
1416 let prev_version = self.text.version();
1417 self.reload_task = Some(cx.spawn(async move |this, cx| {
1418 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1419 let file = this.file.as_ref()?.as_local()?;
1420
1421 Some((file.disk_state().mtime(), file.load(cx)))
1422 })?
1423 else {
1424 return Ok(());
1425 };
1426
1427 let new_text = new_text.await?;
1428 let diff = this
1429 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1430 .await;
1431 this.update(cx, |this, cx| {
1432 if this.version() == diff.base_version {
1433 this.finalize_last_transaction();
1434 this.apply_diff(diff, cx);
1435 tx.send(this.finalize_last_transaction().cloned()).ok();
1436 this.has_conflict = false;
1437 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1438 } else {
1439 if !diff.edits.is_empty()
1440 || this
1441 .edits_since::<usize>(&diff.base_version)
1442 .next()
1443 .is_some()
1444 {
1445 this.has_conflict = true;
1446 }
1447
1448 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1449 }
1450
1451 this.reload_task.take();
1452 })
1453 }));
1454 rx
1455 }
1456
1457 /// This method is called to signal that the buffer has been reloaded.
1458 pub fn did_reload(
1459 &mut self,
1460 version: clock::Global,
1461 line_ending: LineEnding,
1462 mtime: Option<MTime>,
1463 cx: &mut Context<Self>,
1464 ) {
1465 self.saved_version = version;
1466 self.has_unsaved_edits
1467 .set((self.saved_version.clone(), false));
1468 self.text.set_line_ending(line_ending);
1469 self.saved_mtime = mtime;
1470 cx.emit(BufferEvent::Reloaded);
1471 cx.notify();
1472 }
1473
1474 /// Updates the [`File`] backing this buffer. This should be called when
1475 /// the file has changed or has been deleted.
1476 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1477 let was_dirty = self.is_dirty();
1478 let mut file_changed = false;
1479
1480 if let Some(old_file) = self.file.as_ref() {
1481 if new_file.path() != old_file.path() {
1482 file_changed = true;
1483 }
1484
1485 let old_state = old_file.disk_state();
1486 let new_state = new_file.disk_state();
1487 if old_state != new_state {
1488 file_changed = true;
1489 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1490 cx.emit(BufferEvent::ReloadNeeded)
1491 }
1492 }
1493 } else {
1494 file_changed = true;
1495 };
1496
1497 self.file = Some(new_file);
1498 if file_changed {
1499 self.was_changed();
1500 self.non_text_state_update_count += 1;
1501 if was_dirty != self.is_dirty() {
1502 cx.emit(BufferEvent::DirtyChanged);
1503 }
1504 cx.emit(BufferEvent::FileHandleChanged);
1505 cx.notify();
1506 }
1507 }
1508
1509 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1510 Some(self.branch_state.as_ref()?.base_buffer.clone())
1511 }
1512
1513 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1514 pub fn language(&self) -> Option<&Arc<Language>> {
1515 self.language.as_ref()
1516 }
1517
1518 /// Returns the [`Language`] at the given location.
1519 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1520 let offset = position.to_offset(self);
1521 let mut is_first = true;
1522 let start_anchor = self.anchor_before(offset);
1523 let end_anchor = self.anchor_after(offset);
1524 self.syntax_map
1525 .lock()
1526 .layers_for_range(offset..offset, &self.text, false)
1527 .filter(|layer| {
1528 if is_first {
1529 is_first = false;
1530 return true;
1531 }
1532
1533 layer
1534 .included_sub_ranges
1535 .map(|sub_ranges| {
1536 sub_ranges.iter().any(|sub_range| {
1537 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1538 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1539 !is_before_start && !is_after_end
1540 })
1541 })
1542 .unwrap_or(true)
1543 })
1544 .last()
1545 .map(|info| info.language.clone())
1546 .or_else(|| self.language.clone())
1547 }
1548
1549 /// Returns each [`Language`] for the active syntax layers at the given location.
1550 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1551 let offset = position.to_offset(self);
1552 let mut languages: Vec<Arc<Language>> = self
1553 .syntax_map
1554 .lock()
1555 .layers_for_range(offset..offset, &self.text, false)
1556 .map(|info| info.language.clone())
1557 .collect();
1558
1559 if languages.is_empty()
1560 && let Some(buffer_language) = self.language()
1561 {
1562 languages.push(buffer_language.clone());
1563 }
1564
1565 languages
1566 }
1567
1568 /// An integer version number that accounts for all updates besides
1569 /// the buffer's text itself (which is versioned via a version vector).
1570 pub fn non_text_state_update_count(&self) -> usize {
1571 self.non_text_state_update_count
1572 }
1573
1574 /// Whether the buffer is being parsed in the background.
1575 #[cfg(any(test, feature = "test-support"))]
1576 pub fn is_parsing(&self) -> bool {
1577 self.reparse.is_some()
1578 }
1579
1580 /// Indicates whether the buffer contains any regions that may be
1581 /// written in a language that hasn't been loaded yet.
1582 pub fn contains_unknown_injections(&self) -> bool {
1583 self.syntax_map.lock().contains_unknown_injections()
1584 }
1585
1586 #[cfg(any(test, feature = "test-support"))]
1587 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1588 self.sync_parse_timeout = timeout;
1589 }
1590
1591 /// Called after an edit to synchronize the buffer's main parse tree with
1592 /// the buffer's new underlying state.
1593 ///
1594 /// Locks the syntax map and interpolates the edits since the last reparse
1595 /// into the foreground syntax tree.
1596 ///
1597 /// Then takes a stable snapshot of the syntax map before unlocking it.
1598 /// The snapshot with the interpolated edits is sent to a background thread,
1599 /// where we ask Tree-sitter to perform an incremental parse.
1600 ///
1601 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1602 /// waiting on the parse to complete. As soon as it completes, we proceed
1603 /// synchronously, unless a 1ms timeout elapses.
1604 ///
 /// If we time out waiting on the parse, we return with the interpolated tree
 /// still in the foreground and spawn a second task that waits for the parse
 /// to complete. When the background parse completes, we call back into the
 /// main thread and assign the result as the new parse state.
1609 ///
1610 /// If the buffer or grammar changed since the start of the background parse,
1611 /// initiate an additional reparse recursively. To avoid concurrent parses
1612 /// for the same buffer, we only initiate a new parse if we are not already
1613 /// parsing in the background.
1614 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1615 if self.reparse.is_some() {
1616 return;
1617 }
1618 let language = if let Some(language) = self.language.clone() {
1619 language
1620 } else {
1621 return;
1622 };
1623
1624 let text = self.text_snapshot();
1625 let parsed_version = self.version();
1626
1627 let mut syntax_map = self.syntax_map.lock();
1628 syntax_map.interpolate(&text);
1629 let language_registry = syntax_map.language_registry();
1630 let mut syntax_snapshot = syntax_map.snapshot();
1631 drop(syntax_map);
1632
1633 let parse_task = cx.background_spawn({
1634 let language = language.clone();
1635 let language_registry = language_registry.clone();
1636 async move {
1637 syntax_snapshot.reparse(&text, language_registry, language);
1638 syntax_snapshot
1639 }
1640 });
1641
1642 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1643 match cx
1644 .background_executor()
1645 .block_with_timeout(self.sync_parse_timeout, parse_task)
1646 {
1647 Ok(new_syntax_snapshot) => {
1648 self.did_finish_parsing(new_syntax_snapshot, cx);
1649 self.reparse = None;
1650 }
1651 Err(parse_task) => {
1652 // todo(lw): hot foreground spawn
1653 self.reparse = Some(cx.spawn(async move |this, cx| {
1654 let new_syntax_map = cx.background_spawn(parse_task).await;
1655 this.update(cx, move |this, cx| {
1656 let grammar_changed = || {
1657 this.language.as_ref().is_none_or(|current_language| {
1658 !Arc::ptr_eq(&language, current_language)
1659 })
1660 };
1661 let language_registry_changed = || {
1662 new_syntax_map.contains_unknown_injections()
1663 && language_registry.is_some_and(|registry| {
1664 registry.version() != new_syntax_map.language_registry_version()
1665 })
1666 };
1667 let parse_again = this.version.changed_since(&parsed_version)
1668 || language_registry_changed()
1669 || grammar_changed();
1670 this.did_finish_parsing(new_syntax_map, cx);
1671 this.reparse = None;
1672 if parse_again {
1673 this.reparse(cx);
1674 }
1675 })
1676 .ok();
1677 }));
1678 }
1679 }
1680 }
1681
1682 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1683 self.was_changed();
1684 self.non_text_state_update_count += 1;
1685 self.syntax_map.lock().did_parse(syntax_snapshot);
1686 self.request_autoindent(cx);
1687 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1688 cx.emit(BufferEvent::Reparsed);
1689 cx.notify();
1690 }
1691
1692 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1693 self.parse_status.1.clone()
1694 }
1695
1696 /// Assign to the buffer a set of diagnostics created by a given language server.
1697 pub fn update_diagnostics(
1698 &mut self,
1699 server_id: LanguageServerId,
1700 diagnostics: DiagnosticSet,
1701 cx: &mut Context<Self>,
1702 ) {
1703 let lamport_timestamp = self.text.lamport_clock.tick();
1704 let op = Operation::UpdateDiagnostics {
1705 server_id,
1706 diagnostics: diagnostics.iter().cloned().collect(),
1707 lamport_timestamp,
1708 };
1709
1710 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1711 self.send_operation(op, true, cx);
1712 }
1713
1714 pub fn buffer_diagnostics(
1715 &self,
1716 for_server: Option<LanguageServerId>,
1717 ) -> Vec<&DiagnosticEntry<Anchor>> {
1718 match for_server {
1719 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1720 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1721 Err(_) => Vec::new(),
1722 },
1723 None => self
1724 .diagnostics
1725 .iter()
1726 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1727 .collect(),
1728 }
1729 }
1730
1731 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1732 if let Some(indent_sizes) = self.compute_autoindents() {
1733 let indent_sizes = cx.background_spawn(indent_sizes);
1734 match cx
1735 .background_executor()
1736 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1737 {
1738 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1739 Err(indent_sizes) => {
1740 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1741 let indent_sizes = indent_sizes.await;
1742 this.update(cx, |this, cx| {
1743 this.apply_autoindents(indent_sizes, cx);
1744 })
1745 .ok();
1746 }));
1747 }
1748 }
1749 } else {
1750 self.autoindent_requests.clear();
1751 for tx in self.wait_for_autoindent_txs.drain(..) {
1752 tx.send(()).ok();
1753 }
1754 }
1755 }
1756
1757 fn compute_autoindents(
1758 &self,
1759 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1760 let max_rows_between_yields = 100;
1761 let snapshot = self.snapshot();
1762 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1763 return None;
1764 }
1765
1766 let autoindent_requests = self.autoindent_requests.clone();
1767 Some(async move {
1768 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1769 for request in autoindent_requests {
1770 // Resolve each edited range to its row in the current buffer and in the
1771 // buffer before this batch of edits.
1772 let mut row_ranges = Vec::new();
1773 let mut old_to_new_rows = BTreeMap::new();
1774 let mut language_indent_sizes_by_new_row = Vec::new();
1775 for entry in &request.entries {
1776 let position = entry.range.start;
1777 let new_row = position.to_point(&snapshot).row;
1778 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1779 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1780
1781 if !entry.first_line_is_new {
1782 let old_row = position.to_point(&request.before_edit).row;
1783 old_to_new_rows.insert(old_row, new_row);
1784 }
1785 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1786 }
1787
1788 // Build a map containing the suggested indentation for each of the edited lines
1789 // with respect to the state of the buffer before these edits. This map is keyed
1790 // by the rows for these lines in the current state of the buffer.
1791 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1792 let old_edited_ranges =
1793 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1794 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1795 let mut language_indent_size = IndentSize::default();
1796 for old_edited_range in old_edited_ranges {
1797 let suggestions = request
1798 .before_edit
1799 .suggest_autoindents(old_edited_range.clone())
1800 .into_iter()
1801 .flatten();
1802 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1803 if let Some(suggestion) = suggestion {
1804 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1805
1806 // Find the indent size based on the language for this row.
1807 while let Some((row, size)) = language_indent_sizes.peek() {
1808 if *row > new_row {
1809 break;
1810 }
1811 language_indent_size = *size;
1812 language_indent_sizes.next();
1813 }
1814
1815 let suggested_indent = old_to_new_rows
1816 .get(&suggestion.basis_row)
1817 .and_then(|from_row| {
1818 Some(old_suggestions.get(from_row).copied()?.0)
1819 })
1820 .unwrap_or_else(|| {
1821 request
1822 .before_edit
1823 .indent_size_for_line(suggestion.basis_row)
1824 })
1825 .with_delta(suggestion.delta, language_indent_size);
1826 old_suggestions
1827 .insert(new_row, (suggested_indent, suggestion.within_error));
1828 }
1829 }
1830 yield_now().await;
1831 }
1832
1833 // Compute new suggestions for each line, but only include them in the result
1834 // if they differ from the old suggestion for that line.
1835 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1836 let mut language_indent_size = IndentSize::default();
1837 for (row_range, original_indent_column) in row_ranges {
1838 let new_edited_row_range = if request.is_block_mode {
1839 row_range.start..row_range.start + 1
1840 } else {
1841 row_range.clone()
1842 };
1843
1844 let suggestions = snapshot
1845 .suggest_autoindents(new_edited_row_range.clone())
1846 .into_iter()
1847 .flatten();
1848 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1849 if let Some(suggestion) = suggestion {
1850 // Find the indent size based on the language for this row.
1851 while let Some((row, size)) = language_indent_sizes.peek() {
1852 if *row > new_row {
1853 break;
1854 }
1855 language_indent_size = *size;
1856 language_indent_sizes.next();
1857 }
1858
1859 let suggested_indent = indent_sizes
1860 .get(&suggestion.basis_row)
1861 .copied()
1862 .map(|e| e.0)
1863 .unwrap_or_else(|| {
1864 snapshot.indent_size_for_line(suggestion.basis_row)
1865 })
1866 .with_delta(suggestion.delta, language_indent_size);
1867
1868 if old_suggestions.get(&new_row).is_none_or(
1869 |(old_indentation, was_within_error)| {
1870 suggested_indent != *old_indentation
1871 && (!suggestion.within_error || *was_within_error)
1872 },
1873 ) {
1874 indent_sizes.insert(
1875 new_row,
1876 (suggested_indent, request.ignore_empty_lines),
1877 );
1878 }
1879 }
1880 }
1881
1882 if let (true, Some(original_indent_column)) =
1883 (request.is_block_mode, original_indent_column)
1884 {
1885 let new_indent =
1886 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1887 *indent
1888 } else {
1889 snapshot.indent_size_for_line(row_range.start)
1890 };
1891 let delta = new_indent.len as i64 - original_indent_column as i64;
1892 if delta != 0 {
1893 for row in row_range.skip(1) {
1894 indent_sizes.entry(row).or_insert_with(|| {
1895 let mut size = snapshot.indent_size_for_line(row);
1896 if size.kind == new_indent.kind {
1897 match delta.cmp(&0) {
1898 Ordering::Greater => size.len += delta as u32,
1899 Ordering::Less => {
1900 size.len = size.len.saturating_sub(-delta as u32)
1901 }
1902 Ordering::Equal => {}
1903 }
1904 }
1905 (size, request.ignore_empty_lines)
1906 });
1907 }
1908 }
1909 }
1910
1911 yield_now().await;
1912 }
1913 }
1914
1915 indent_sizes
1916 .into_iter()
1917 .filter_map(|(row, (indent, ignore_empty_lines))| {
1918 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1919 None
1920 } else {
1921 Some((row, indent))
1922 }
1923 })
1924 .collect()
1925 })
1926 }
1927
1928 fn apply_autoindents(
1929 &mut self,
1930 indent_sizes: BTreeMap<u32, IndentSize>,
1931 cx: &mut Context<Self>,
1932 ) {
1933 self.autoindent_requests.clear();
1934 for tx in self.wait_for_autoindent_txs.drain(..) {
1935 tx.send(()).ok();
1936 }
1937
1938 let edits: Vec<_> = indent_sizes
1939 .into_iter()
1940 .filter_map(|(row, indent_size)| {
1941 let current_size = indent_size_for_line(self, row);
1942 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1943 })
1944 .collect();
1945
1946 let preserve_preview = self.preserve_preview();
1947 self.edit(edits, None, cx);
1948 if preserve_preview {
1949 self.refresh_preview();
1950 }
1951 }
1952
1953 /// Create a minimal edit that will cause the given row to be indented
1954 /// with the given size. After applying this edit, the length of the line
1955 /// will always be at least `new_size.len`.
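    ///
    /// # Example
    ///
    /// A minimal sketch (marked `ignore` since it is illustrative rather than a
    /// compiled doctest): growing a two-space indent to four spaces yields an
    /// insertion of two spaces at the start of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```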
1956 pub fn edit_for_indent_size_adjustment(
1957 row: u32,
1958 current_size: IndentSize,
1959 new_size: IndentSize,
1960 ) -> Option<(Range<Point>, String)> {
1961 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1963 Ordering::Greater => {
1964 let point = Point::new(row, 0);
1965 Some((
1966 point..point,
1967 iter::repeat(new_size.char())
1968 .take((new_size.len - current_size.len) as usize)
1969 .collect::<String>(),
1970 ))
1971 }
1972
1973 Ordering::Less => Some((
1974 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1975 String::new(),
1976 )),
1977
1978 Ordering::Equal => None,
1979 }
1980 } else {
1981 Some((
1982 Point::new(row, 0)..Point::new(row, current_size.len),
1983 iter::repeat(new_size.char())
1984 .take(new_size.len as usize)
1985 .collect::<String>(),
1986 ))
1987 }
1988 }
1989
1990 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1991 /// and the given new text.
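    ///
    /// # Example
    ///
    /// A sketch modeled on how this module drives its own background tasks,
    /// assuming it runs inside a `Buffer` method with a `cx: &mut Context<Buffer>`;
    /// the resulting [`Diff`] is typically fed back into [`Buffer::apply_diff`]:
    ///
    /// ```ignore
    /// let diff = self.diff("fn main() {}\n".to_string(), cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |this, cx| {
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```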
1992 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1993 let old_text = self.as_rope().clone();
1994 let base_version = self.version();
1995 cx.background_executor()
1996 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1997 let old_text = old_text.to_string();
1998 let line_ending = LineEnding::detect(&new_text);
1999 LineEnding::normalize(&mut new_text);
2000 let edits = text_diff(&old_text, &new_text);
2001 Diff {
2002 base_version,
2003 line_ending,
2004 edits,
2005 }
2006 })
2007 }
2008
    /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2011 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2012 let old_text = self.as_rope().clone();
2013 let line_ending = self.line_ending();
2014 let base_version = self.version();
2015 cx.background_spawn(async move {
2016 let ranges = trailing_whitespace_ranges(&old_text);
2017 let empty = Arc::<str>::from("");
2018 Diff {
2019 base_version,
2020 line_ending,
2021 edits: ranges
2022 .into_iter()
2023 .map(|range| (range, empty.clone()))
2024 .collect(),
2025 }
2026 })
2027 }
2028
2029 /// Ensures that the buffer ends with a single newline character, and
2030 /// no other whitespace. Skips if the buffer is empty.
2031 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2032 let len = self.len();
2033 if len == 0 {
2034 return;
2035 }
2036 let mut offset = len;
2037 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2038 let non_whitespace_len = chunk
2039 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2040 .len();
2041 offset -= chunk.len();
2042 offset += non_whitespace_len;
2043 if non_whitespace_len != 0 {
2044 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2045 return;
2046 }
2047 break;
2048 }
2049 }
2050 self.edit([(offset..len, "\n")], None, cx);
2051 }
2052
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
2056 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2057 let snapshot = self.snapshot();
2058 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2059 let mut delta = 0;
2060 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2061 while let Some(edit_since) = edits_since.peek() {
2062 // If the edit occurs after a diff hunk, then it does not
2063 // affect that hunk.
2064 if edit_since.old.start > range.end {
2065 break;
2066 }
2067 // If the edit precedes the diff hunk, then adjust the hunk
2068 // to reflect the edit.
2069 else if edit_since.old.end < range.start {
2070 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2071 edits_since.next();
2072 }
2073 // If the edit intersects a diff hunk, then discard that hunk.
2074 else {
2075 return None;
2076 }
2077 }
2078
2079 let start = (range.start as i64 + delta) as usize;
2080 let end = (range.end as i64 + delta) as usize;
2081 Some((start..end, new_text))
2082 });
2083
2084 self.start_transaction();
2085 self.text.set_line_ending(diff.line_ending);
2086 self.edit(adjusted_edits, None, cx);
2087 self.end_transaction(cx)
2088 }
2089
2090 pub fn has_unsaved_edits(&self) -> bool {
2091 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2092
2093 if last_version == self.version {
2094 self.has_unsaved_edits
2095 .set((last_version, has_unsaved_edits));
2096 return has_unsaved_edits;
2097 }
2098
2099 let has_edits = self.has_edits_since(&self.saved_version);
2100 self.has_unsaved_edits
2101 .set((self.version.clone(), has_edits));
2102 has_edits
2103 }
2104
2105 /// Checks if the buffer has unsaved changes.
2106 pub fn is_dirty(&self) -> bool {
2107 if self.capability == Capability::ReadOnly {
2108 return false;
2109 }
2110 if self.has_conflict {
2111 return true;
2112 }
2113 match self.file.as_ref().map(|f| f.disk_state()) {
2114 Some(DiskState::New) | Some(DiskState::Deleted) => {
2115 !self.is_empty() && self.has_unsaved_edits()
2116 }
2117 _ => self.has_unsaved_edits(),
2118 }
2119 }
2120
2121 /// Checks if the buffer and its file have both changed since the buffer
2122 /// was last saved or reloaded.
2123 pub fn has_conflict(&self) -> bool {
2124 if self.has_conflict {
2125 return true;
2126 }
2127 let Some(file) = self.file.as_ref() else {
2128 return false;
2129 };
2130 match file.disk_state() {
2131 DiskState::New => false,
2132 DiskState::Present { mtime } => match self.saved_mtime {
2133 Some(saved_mtime) => {
2134 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2135 }
2136 None => true,
2137 },
2138 DiskState::Deleted => false,
2139 }
2140 }
2141
2142 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2143 pub fn subscribe(&mut self) -> Subscription {
2144 self.text.subscribe()
2145 }
2146
2147 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2148 ///
2149 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
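    ///
    /// # Example
    ///
    /// A minimal sketch: keep the strong `Rc` alive for as long as you want to
    /// observe changes, and hand the buffer a weak clone of it.
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits...
    /// if changed.replace(false) {
    ///     // The buffer's text changed since the bit was last cleared.
    /// }
    /// ```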
2151 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2152 if let Err(ix) = self
2153 .change_bits
2154 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2155 {
2156 self.change_bits.insert(ix, bit);
2157 }
2158 }
2159
2160 /// Set the change bit for all "listeners".
2161 fn was_changed(&mut self) {
2162 self.change_bits.retain(|change_bit| {
2163 change_bit
2164 .upgrade()
2165 .inspect(|bit| {
2166 _ = bit.replace(true);
2167 })
2168 .is_some()
2169 });
2170 }
2171
2172 /// Starts a transaction, if one is not already in-progress. When undoing or
2173 /// redoing edits, all of the edits performed within a transaction are undone
2174 /// or redone together.
2175 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2176 self.start_transaction_at(Instant::now())
2177 }
2178
2179 /// Starts a transaction, providing the current time. Subsequent transactions
2180 /// that occur within a short period of time will be grouped together. This
2181 /// is controlled by the buffer's undo grouping duration.
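    ///
    /// For example (a sketch, assuming a `cx: &mut Context<Buffer>`), two
    /// transactions started at nearly the same instant end up grouped into a
    /// single undo step:
    ///
    /// ```ignore
    /// let now = Instant::now();
    /// buffer.start_transaction_at(now);
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.end_transaction_at(now, cx);
    ///
    /// buffer.start_transaction_at(now);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction_at(now, cx);
    ///
    /// // With a non-zero group interval, a single undo removes both edits.
    /// buffer.undo(cx);
    /// ```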
2182 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2183 self.transaction_depth += 1;
2184 if self.was_dirty_before_starting_transaction.is_none() {
2185 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2186 }
2187 self.text.start_transaction_at(now)
2188 }
2189
2190 /// Terminates the current transaction, if this is the outermost transaction.
2191 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2192 self.end_transaction_at(Instant::now(), cx)
2193 }
2194
2195 /// Terminates the current transaction, providing the current time. Subsequent transactions
2196 /// that occur within a short period of time will be grouped together. This
2197 /// is controlled by the buffer's undo grouping duration.
2198 pub fn end_transaction_at(
2199 &mut self,
2200 now: Instant,
2201 cx: &mut Context<Self>,
2202 ) -> Option<TransactionId> {
2203 assert!(self.transaction_depth > 0);
2204 self.transaction_depth -= 1;
2205 let was_dirty = if self.transaction_depth == 0 {
2206 self.was_dirty_before_starting_transaction.take().unwrap()
2207 } else {
2208 false
2209 };
2210 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2211 self.did_edit(&start_version, was_dirty, cx);
2212 Some(transaction_id)
2213 } else {
2214 None
2215 }
2216 }
2217
2218 /// Manually add a transaction to the buffer's undo history.
2219 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2220 self.text.push_transaction(transaction, now);
2221 }
2222
2223 /// Differs from `push_transaction` in that it does not clear the redo
2224 /// stack. Intended to be used to create a parent transaction to merge
2225 /// potential child transactions into.
2226 ///
2227 /// The caller is responsible for removing it from the undo history using
2228 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2229 /// are merged into this transaction, the caller is responsible for ensuring
2230 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2231 /// cleared is to create transactions with the usual `start_transaction` and
2232 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
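    ///
    /// A sketch of the intended usage (assuming a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     // Creating the child via start/end_transaction cleared the redo stack,
    ///     // so it is safe to merge it into the parent.
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     // Nothing was merged, so remove the empty parent from the undo history.
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```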
2234 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2235 self.text.push_empty_transaction(now)
2236 }
2237
2238 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2240 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2241 self.text.finalize_last_transaction()
2242 }
2243
2244 /// Manually group all changes since a given transaction.
2245 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2246 self.text.group_until_transaction(transaction_id);
2247 }
2248
2249 /// Manually remove a transaction from the buffer's undo history
2250 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2251 self.text.forget_transaction(transaction_id)
2252 }
2253
2254 /// Retrieve a transaction from the buffer's undo history
2255 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2256 self.text.get_transaction(transaction_id)
2257 }
2258
2259 /// Manually merge two transactions in the buffer's undo history.
2260 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2261 self.text.merge_transactions(transaction, destination);
2262 }
2263
2264 /// Waits for the buffer to receive operations with the given timestamps.
2265 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2266 &mut self,
2267 edit_ids: It,
2268 ) -> impl Future<Output = Result<()>> + use<It> {
2269 self.text.wait_for_edits(edit_ids)
2270 }
2271
2272 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2273 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2274 &mut self,
2275 anchors: It,
2276 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2277 self.text.wait_for_anchors(anchors)
2278 }
2279
2280 /// Waits for the buffer to receive operations up to the given version.
2281 pub fn wait_for_version(
2282 &mut self,
2283 version: clock::Global,
2284 ) -> impl Future<Output = Result<()>> + use<> {
2285 self.text.wait_for_version(version)
2286 }
2287
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2290 pub fn give_up_waiting(&mut self) {
2291 self.text.give_up_waiting();
2292 }
2293
2294 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2295 let mut rx = None;
2296 if !self.autoindent_requests.is_empty() {
2297 let channel = oneshot::channel();
2298 self.wait_for_autoindent_txs.push(channel.0);
2299 rx = Some(channel.1);
2300 }
2301 rx
2302 }
2303
2304 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2305 pub fn set_active_selections(
2306 &mut self,
2307 selections: Arc<[Selection<Anchor>]>,
2308 line_mode: bool,
2309 cursor_shape: CursorShape,
2310 cx: &mut Context<Self>,
2311 ) {
2312 let lamport_timestamp = self.text.lamport_clock.tick();
2313 self.remote_selections.insert(
2314 self.text.replica_id(),
2315 SelectionSet {
2316 selections: selections.clone(),
2317 lamport_timestamp,
2318 line_mode,
2319 cursor_shape,
2320 },
2321 );
2322 self.send_operation(
2323 Operation::UpdateSelections {
2324 selections,
2325 line_mode,
2326 lamport_timestamp,
2327 cursor_shape,
2328 },
2329 true,
2330 cx,
2331 );
2332 self.non_text_state_update_count += 1;
2333 cx.notify();
2334 }
2335
2336 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2337 /// this replica.
2338 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2339 if self
2340 .remote_selections
2341 .get(&self.text.replica_id())
2342 .is_none_or(|set| !set.selections.is_empty())
2343 {
2344 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2345 }
2346 }
2347
2348 pub fn set_agent_selections(
2349 &mut self,
2350 selections: Arc<[Selection<Anchor>]>,
2351 line_mode: bool,
2352 cursor_shape: CursorShape,
2353 cx: &mut Context<Self>,
2354 ) {
2355 let lamport_timestamp = self.text.lamport_clock.tick();
2356 self.remote_selections.insert(
2357 ReplicaId::AGENT,
2358 SelectionSet {
2359 selections,
2360 lamport_timestamp,
2361 line_mode,
2362 cursor_shape,
2363 },
2364 );
2365 self.non_text_state_update_count += 1;
2366 cx.notify();
2367 }
2368
2369 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2370 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2371 }
2372
2373 /// Replaces the buffer's entire text.
2374 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2375 where
2376 T: Into<Arc<str>>,
2377 {
2378 self.autoindent_requests.clear();
2379 self.edit([(0..self.len(), text)], None, cx)
2380 }
2381
2382 /// Appends the given text to the end of the buffer.
2383 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2384 where
2385 T: Into<Arc<str>>,
2386 {
2387 self.edit([(self.len()..self.len(), text)], None, cx)
2388 }
2389
2390 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2391 /// delete, and a string of text to insert at that location.
2392 ///
2393 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2394 /// request for the edited ranges, which will be processed when the buffer finishes
2395 /// parsing.
2396 ///
    /// Parsing takes place at the end of a transaction, and may complete synchronously
    /// or asynchronously, depending on the changes.
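    ///
    /// # Example
    ///
    /// A sketch (assuming a `cx: &mut Context<Buffer>`): replace the first three
    /// bytes and insert a new line at offset 10, auto-indenting each edited line:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n    value")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```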
2399 pub fn edit<I, S, T>(
2400 &mut self,
2401 edits_iter: I,
2402 autoindent_mode: Option<AutoindentMode>,
2403 cx: &mut Context<Self>,
2404 ) -> Option<clock::Lamport>
2405 where
2406 I: IntoIterator<Item = (Range<S>, T)>,
2407 S: ToOffset,
2408 T: Into<Arc<str>>,
2409 {
2410 // Skip invalid edits and coalesce contiguous ones.
2411 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2412
2413 for (range, new_text) in edits_iter {
2414 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2415
2416 if range.start > range.end {
2417 mem::swap(&mut range.start, &mut range.end);
2418 }
2419 let new_text = new_text.into();
2420 if !new_text.is_empty() || !range.is_empty() {
2421 if let Some((prev_range, prev_text)) = edits.last_mut()
2422 && prev_range.end >= range.start
2423 {
2424 prev_range.end = cmp::max(prev_range.end, range.end);
2425 *prev_text = format!("{prev_text}{new_text}").into();
2426 } else {
2427 edits.push((range, new_text));
2428 }
2429 }
2430 }
2431 if edits.is_empty() {
2432 return None;
2433 }
2434
2435 self.start_transaction();
2436 self.pending_autoindent.take();
2437 let autoindent_request = autoindent_mode
2438 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2439
2440 let edit_operation = self
2441 .text
2442 .edit(edits.iter().cloned(), cx.background_executor());
2443 let edit_id = edit_operation.timestamp();
2444
2445 if let Some((before_edit, mode)) = autoindent_request {
2446 let mut delta = 0isize;
2447 let mut previous_setting = None;
2448 let entries: Vec<_> = edits
2449 .into_iter()
2450 .enumerate()
2451 .zip(&edit_operation.as_edit().unwrap().new_text)
2452 .filter(|((_, (range, _)), _)| {
2453 let language = before_edit.language_at(range.start);
2454 let language_id = language.map(|l| l.id());
2455 if let Some((cached_language_id, auto_indent)) = previous_setting
2456 && cached_language_id == language_id
2457 {
2458 auto_indent
2459 } else {
2460 // The auto-indent setting is not present in editorconfigs, hence
2461 // we can avoid passing the file here.
2462 let auto_indent =
2463 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2464 previous_setting = Some((language_id, auto_indent));
2465 auto_indent
2466 }
2467 })
2468 .map(|((ix, (range, _)), new_text)| {
2469 let new_text_length = new_text.len();
2470 let old_start = range.start.to_point(&before_edit);
2471 let new_start = (delta + range.start as isize) as usize;
2472 let range_len = range.end - range.start;
2473 delta += new_text_length as isize - range_len as isize;
2474
2475 // Decide what range of the insertion to auto-indent, and whether
2476 // the first line of the insertion should be considered a newly-inserted line
2477 // or an edit to an existing line.
2478 let mut range_of_insertion_to_indent = 0..new_text_length;
2479 let mut first_line_is_new = true;
2480
2481 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2482 let old_line_end = before_edit.line_len(old_start.row);
2483
2484 if old_start.column > old_line_start {
2485 first_line_is_new = false;
2486 }
2487
2488 if !new_text.contains('\n')
2489 && (old_start.column + (range_len as u32) < old_line_end
2490 || old_line_end == old_line_start)
2491 {
2492 first_line_is_new = false;
2493 }
2494
2495 // When inserting text starting with a newline, avoid auto-indenting the
2496 // previous line.
2497 if new_text.starts_with('\n') {
2498 range_of_insertion_to_indent.start += 1;
2499 first_line_is_new = true;
2500 }
2501
2502 let mut original_indent_column = None;
2503 if let AutoindentMode::Block {
2504 original_indent_columns,
2505 } = &mode
2506 {
2507 original_indent_column = Some(if new_text.starts_with('\n') {
2508 indent_size_for_text(
2509 new_text[range_of_insertion_to_indent.clone()].chars(),
2510 )
2511 .len
2512 } else {
2513 original_indent_columns
2514 .get(ix)
2515 .copied()
2516 .flatten()
2517 .unwrap_or_else(|| {
2518 indent_size_for_text(
2519 new_text[range_of_insertion_to_indent.clone()].chars(),
2520 )
2521 .len
2522 })
2523 });
2524
2525 // Avoid auto-indenting the line after the edit.
2526 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2527 range_of_insertion_to_indent.end -= 1;
2528 }
2529 }
2530
2531 AutoindentRequestEntry {
2532 first_line_is_new,
2533 original_indent_column,
2534 indent_size: before_edit.language_indent_size_at(range.start, cx),
2535 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2536 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2537 }
2538 })
2539 .collect();
2540
2541 if !entries.is_empty() {
2542 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2543 before_edit,
2544 entries,
2545 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2546 ignore_empty_lines: false,
2547 }));
2548 }
2549 }
2550
2551 self.end_transaction(cx);
2552 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2553 Some(edit_id)
2554 }
2555
2556 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2557 self.was_changed();
2558
2559 if self.edits_since::<usize>(old_version).next().is_none() {
2560 return;
2561 }
2562
2563 self.reparse(cx);
2564 cx.emit(BufferEvent::Edited);
2565 if was_dirty != self.is_dirty() {
2566 cx.emit(BufferEvent::DirtyChanged);
2567 }
2568 cx.notify();
2569 }
2570
2571 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2572 where
2573 I: IntoIterator<Item = Range<T>>,
2574 T: ToOffset + Copy,
2575 {
2576 let before_edit = self.snapshot();
2577 let entries = ranges
2578 .into_iter()
2579 .map(|range| AutoindentRequestEntry {
2580 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2581 first_line_is_new: true,
2582 indent_size: before_edit.language_indent_size_at(range.start, cx),
2583 original_indent_column: None,
2584 })
2585 .collect();
2586 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2587 before_edit,
2588 entries,
2589 is_block_mode: false,
2590 ignore_empty_lines: true,
2591 }));
2592 self.request_autoindent(cx);
2593 }
2594
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
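    ///
    /// A sketch (assuming a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Insert an empty line near row 2, padded with blank lines above and below.
    /// let start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
    /// ```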
2597 pub fn insert_empty_line(
2598 &mut self,
2599 position: impl ToPoint,
2600 space_above: bool,
2601 space_below: bool,
2602 cx: &mut Context<Self>,
2603 ) -> Point {
2604 let mut position = position.to_point(self);
2605
2606 self.start_transaction();
2607
2608 self.edit(
2609 [(position..position, "\n")],
2610 Some(AutoindentMode::EachLine),
2611 cx,
2612 );
2613
2614 if position.column > 0 {
2615 position += Point::new(1, 0);
2616 }
2617
2618 if !self.is_line_blank(position.row) {
2619 self.edit(
2620 [(position..position, "\n")],
2621 Some(AutoindentMode::EachLine),
2622 cx,
2623 );
2624 }
2625
2626 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2627 self.edit(
2628 [(position..position, "\n")],
2629 Some(AutoindentMode::EachLine),
2630 cx,
2631 );
2632 position.row += 1;
2633 }
2634
2635 if space_below
2636 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2637 {
2638 self.edit(
2639 [(position..position, "\n")],
2640 Some(AutoindentMode::EachLine),
2641 cx,
2642 );
2643 }
2644
2645 self.end_transaction(cx);
2646
2647 position
2648 }
2649
2650 /// Applies the given remote operations to the buffer.
2651 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2652 self.pending_autoindent.take();
2653 let was_dirty = self.is_dirty();
2654 let old_version = self.version.clone();
2655 let mut deferred_ops = Vec::new();
2656 let buffer_ops = ops
2657 .into_iter()
2658 .filter_map(|op| match op {
2659 Operation::Buffer(op) => Some(op),
2660 _ => {
2661 if self.can_apply_op(&op) {
2662 self.apply_op(op, cx);
2663 } else {
2664 deferred_ops.push(op);
2665 }
2666 None
2667 }
2668 })
2669 .collect::<Vec<_>>();
2670 for operation in buffer_ops.iter() {
2671 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2672 }
2673 self.text
2674 .apply_ops(buffer_ops, Some(cx.background_executor()));
2675 self.deferred_ops.insert(deferred_ops);
2676 self.flush_deferred_ops(cx);
2677 self.did_edit(&old_version, was_dirty, cx);
2678 // Notify independently of whether the buffer was edited as the operations could include a
2679 // selection update.
2680 cx.notify();
2681 }
2682
2683 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2684 let mut deferred_ops = Vec::new();
2685 for op in self.deferred_ops.drain().iter().cloned() {
2686 if self.can_apply_op(&op) {
2687 self.apply_op(op, cx);
2688 } else {
2689 deferred_ops.push(op);
2690 }
2691 }
2692 self.deferred_ops.insert(deferred_ops);
2693 }
2694
2695 pub fn has_deferred_ops(&self) -> bool {
2696 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2697 }
2698
2699 fn can_apply_op(&self, operation: &Operation) -> bool {
2700 match operation {
2701 Operation::Buffer(_) => {
2702 unreachable!("buffer operations should never be applied at this layer")
2703 }
2704 Operation::UpdateDiagnostics {
2705 diagnostics: diagnostic_set,
2706 ..
2707 } => diagnostic_set.iter().all(|diagnostic| {
2708 self.text.can_resolve(&diagnostic.range.start)
2709 && self.text.can_resolve(&diagnostic.range.end)
2710 }),
2711 Operation::UpdateSelections { selections, .. } => selections
2712 .iter()
2713 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2714 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2715 }
2716 }
2717
2718 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2719 match operation {
2720 Operation::Buffer(_) => {
2721 unreachable!("buffer operations should never be applied at this layer")
2722 }
2723 Operation::UpdateDiagnostics {
2724 server_id,
2725 diagnostics: diagnostic_set,
2726 lamport_timestamp,
2727 } => {
2728 let snapshot = self.snapshot();
2729 self.apply_diagnostic_update(
2730 server_id,
2731 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2732 lamport_timestamp,
2733 cx,
2734 );
2735 }
2736 Operation::UpdateSelections {
2737 selections,
2738 lamport_timestamp,
2739 line_mode,
2740 cursor_shape,
2741 } => {
2742 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2743 && set.lamport_timestamp > lamport_timestamp
2744 {
2745 return;
2746 }
2747
2748 self.remote_selections.insert(
2749 lamport_timestamp.replica_id,
2750 SelectionSet {
2751 selections,
2752 lamport_timestamp,
2753 line_mode,
2754 cursor_shape,
2755 },
2756 );
2757 self.text.lamport_clock.observe(lamport_timestamp);
2758 self.non_text_state_update_count += 1;
2759 }
2760 Operation::UpdateCompletionTriggers {
2761 triggers,
2762 lamport_timestamp,
2763 server_id,
2764 } => {
2765 if triggers.is_empty() {
2766 self.completion_triggers_per_language_server
2767 .remove(&server_id);
2768 self.completion_triggers = self
2769 .completion_triggers_per_language_server
2770 .values()
2771 .flat_map(|triggers| triggers.iter().cloned())
2772 .collect();
2773 } else {
2774 self.completion_triggers_per_language_server
2775 .insert(server_id, triggers.iter().cloned().collect());
2776 self.completion_triggers.extend(triggers);
2777 }
2778 self.text.lamport_clock.observe(lamport_timestamp);
2779 }
2780 Operation::UpdateLineEnding {
2781 line_ending,
2782 lamport_timestamp,
2783 } => {
2784 self.text.set_line_ending(line_ending);
2785 self.text.lamport_clock.observe(lamport_timestamp);
2786 }
2787 }
2788 }
2789
2790 fn apply_diagnostic_update(
2791 &mut self,
2792 server_id: LanguageServerId,
2793 diagnostics: DiagnosticSet,
2794 lamport_timestamp: clock::Lamport,
2795 cx: &mut Context<Self>,
2796 ) {
2797 if lamport_timestamp > self.diagnostics_timestamp {
2798 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2799 if diagnostics.is_empty() {
2800 if let Ok(ix) = ix {
2801 self.diagnostics.remove(ix);
2802 }
2803 } else {
2804 match ix {
2805 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2806 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2807 };
2808 }
2809 self.diagnostics_timestamp = lamport_timestamp;
2810 self.non_text_state_update_count += 1;
2811 self.text.lamport_clock.observe(lamport_timestamp);
2812 cx.notify();
2813 cx.emit(BufferEvent::DiagnosticsUpdated);
2814 }
2815 }
2816
2817 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2818 self.was_changed();
2819 cx.emit(BufferEvent::Operation {
2820 operation,
2821 is_local,
2822 });
2823 }
2824
2825 /// Removes the selections for a given peer.
2826 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2827 self.remote_selections.remove(&replica_id);
2828 cx.notify();
2829 }
2830
2831 /// Undoes the most recent transaction.
2832 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2833 let was_dirty = self.is_dirty();
2834 let old_version = self.version.clone();
2835
2836 if let Some((transaction_id, operation)) = self.text.undo() {
2837 self.send_operation(Operation::Buffer(operation), true, cx);
2838 self.did_edit(&old_version, was_dirty, cx);
2839 Some(transaction_id)
2840 } else {
2841 None
2842 }
2843 }
2844
2845 /// Manually undoes a specific transaction in the buffer's undo history.
2846 pub fn undo_transaction(
2847 &mut self,
2848 transaction_id: TransactionId,
2849 cx: &mut Context<Self>,
2850 ) -> bool {
2851 let was_dirty = self.is_dirty();
2852 let old_version = self.version.clone();
2853 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2854 self.send_operation(Operation::Buffer(operation), true, cx);
2855 self.did_edit(&old_version, was_dirty, cx);
2856 true
2857 } else {
2858 false
2859 }
2860 }
2861
2862 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2863 pub fn undo_to_transaction(
2864 &mut self,
2865 transaction_id: TransactionId,
2866 cx: &mut Context<Self>,
2867 ) -> bool {
2868 let was_dirty = self.is_dirty();
2869 let old_version = self.version.clone();
2870
2871 let operations = self.text.undo_to_transaction(transaction_id);
2872 let undone = !operations.is_empty();
2873 for operation in operations {
2874 self.send_operation(Operation::Buffer(operation), true, cx);
2875 }
2876 if undone {
2877 self.did_edit(&old_version, was_dirty, cx)
2878 }
2879 undone
2880 }
2881
2882 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2883 let was_dirty = self.is_dirty();
2884 let operation = self.text.undo_operations(counts);
2885 let old_version = self.version.clone();
2886 self.send_operation(Operation::Buffer(operation), true, cx);
2887 self.did_edit(&old_version, was_dirty, cx);
2888 }
2889
    /// Redoes the most recent transaction.
2891 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2892 let was_dirty = self.is_dirty();
2893 let old_version = self.version.clone();
2894
2895 if let Some((transaction_id, operation)) = self.text.redo() {
2896 self.send_operation(Operation::Buffer(operation), true, cx);
2897 self.did_edit(&old_version, was_dirty, cx);
2898 Some(transaction_id)
2899 } else {
2900 None
2901 }
2902 }
2903
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2905 pub fn redo_to_transaction(
2906 &mut self,
2907 transaction_id: TransactionId,
2908 cx: &mut Context<Self>,
2909 ) -> bool {
2910 let was_dirty = self.is_dirty();
2911 let old_version = self.version.clone();
2912
2913 let operations = self.text.redo_to_transaction(transaction_id);
2914 let redone = !operations.is_empty();
2915 for operation in operations {
2916 self.send_operation(Operation::Buffer(operation), true, cx);
2917 }
2918 if redone {
2919 self.did_edit(&old_version, was_dirty, cx)
2920 }
2921 redone
2922 }
2923
    /// Overrides the current completion triggers with the user-provided completion triggers.
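    ///
    /// A sketch (assuming a `server_id: LanguageServerId` and a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```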
2925 pub fn set_completion_triggers(
2926 &mut self,
2927 server_id: LanguageServerId,
2928 triggers: BTreeSet<String>,
2929 cx: &mut Context<Self>,
2930 ) {
2931 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2932 if triggers.is_empty() {
2933 self.completion_triggers_per_language_server
2934 .remove(&server_id);
2935 self.completion_triggers = self
2936 .completion_triggers_per_language_server
2937 .values()
2938 .flat_map(|triggers| triggers.iter().cloned())
2939 .collect();
2940 } else {
2941 self.completion_triggers_per_language_server
2942 .insert(server_id, triggers.clone());
2943 self.completion_triggers.extend(triggers.iter().cloned());
2944 }
2945 self.send_operation(
2946 Operation::UpdateCompletionTriggers {
2947 triggers: triggers.into_iter().collect(),
2948 lamport_timestamp: self.completion_triggers_timestamp,
2949 server_id,
2950 },
2951 true,
2952 cx,
2953 );
2954 cx.notify();
2955 }
2956
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2959 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2960 &self.completion_triggers
2961 }
2962
2963 /// Call this directly after performing edits to prevent the preview tab
2964 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2965 /// to return false until there are additional edits.
2966 pub fn refresh_preview(&mut self) {
2967 self.preview_version = self.version.clone();
2968 }
2969
2970 /// Whether we should preserve the preview status of a tab containing this buffer.
2971 pub fn preserve_preview(&self) -> bool {
2972 !self.has_edits_since(&self.preview_version)
2973 }
2974}
2975
2976#[doc(hidden)]
2977#[cfg(any(test, feature = "test-support"))]
2978impl Buffer {
2979 pub fn edit_via_marked_text(
2980 &mut self,
2981 marked_string: &str,
2982 autoindent_mode: Option<AutoindentMode>,
2983 cx: &mut Context<Self>,
2984 ) {
2985 let edits = self.edits_for_marked_text(marked_string);
2986 self.edit(edits, autoindent_mode, cx);
2987 }
2988
2989 pub fn set_group_interval(&mut self, group_interval: Duration) {
2990 self.text.set_group_interval(group_interval);
2991 }
2992
2993 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2994 where
2995 T: rand::Rng,
2996 {
2997 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2998 let mut last_end = None;
2999 for _ in 0..old_range_count {
3000 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3001 break;
3002 }
3003
3004 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3005 let mut range = self.random_byte_range(new_start, rng);
3006 if rng.random_bool(0.2) {
3007 mem::swap(&mut range.start, &mut range.end);
3008 }
3009 last_end = Some(range.end);
3010
3011 let new_text_len = rng.random_range(0..10);
3012 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3013 new_text = new_text.to_uppercase();
3014
3015 edits.push((range, new_text));
3016 }
3017 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3018 self.edit(edits, None, cx);
3019 }
3020
3021 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3022 let was_dirty = self.is_dirty();
3023 let old_version = self.version.clone();
3024
3025 let ops = self.text.randomly_undo_redo(rng);
3026 if !ops.is_empty() {
3027 for op in ops {
3028 self.send_operation(Operation::Buffer(op), true, cx);
3029 self.did_edit(&old_version, was_dirty, cx);
3030 }
3031 }
3032 }
3033}
3034
3035impl EventEmitter<BufferEvent> for Buffer {}
3036
3037impl Deref for Buffer {
3038 type Target = TextBuffer;
3039
3040 fn deref(&self) -> &Self::Target {
3041 &self.text
3042 }
3043}
3044
3045impl BufferSnapshot {
3046 /// Returns [`IndentSize`] for a given line that respects user settings and
3047 /// language preferences.
3048 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3049 indent_size_for_line(self, row)
3050 }
3051
3052 /// Returns [`IndentSize`] for a given position that respects user settings
3053 /// and language preferences.
3054 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3055 let settings = language_settings(
3056 self.language_at(position).map(|l| l.name()),
3057 self.file(),
3058 cx,
3059 );
3060 if settings.hard_tabs {
3061 IndentSize::tab()
3062 } else {
3063 IndentSize::spaces(settings.tab_size.get())
3064 }
3065 }
3066
3067 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3068 /// is passed in as `single_indent_size`.
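    ///
    /// A sketch: suggest indents for rows 2 through 4 using a four-space unit.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// ```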
3069 pub fn suggested_indents(
3070 &self,
3071 rows: impl Iterator<Item = u32>,
3072 single_indent_size: IndentSize,
3073 ) -> BTreeMap<u32, IndentSize> {
3074 let mut result = BTreeMap::new();
3075
3076 for row_range in contiguous_ranges(rows, 10) {
3077 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3078 Some(suggestions) => suggestions,
3079 _ => break,
3080 };
3081
3082 for (row, suggestion) in row_range.zip(suggestions) {
3083 let indent_size = if let Some(suggestion) = suggestion {
3084 result
3085 .get(&suggestion.basis_row)
3086 .copied()
3087 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3088 .with_delta(suggestion.delta, single_indent_size)
3089 } else {
3090 self.indent_size_for_line(row)
3091 };
3092
3093 result.insert(row, indent_size);
3094 }
3095 }
3096
3097 result
3098 }
3099
3100 fn suggest_autoindents(
3101 &self,
3102 row_range: Range<u32>,
3103 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3104 let config = &self.language.as_ref()?.config;
3105 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3106
3107 #[derive(Debug, Clone)]
3108 struct StartPosition {
3109 start: Point,
3110 suffix: SharedString,
3111 }
3112
3113 // Find the suggested indentation ranges based on the syntax tree.
3114 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3115 let end = Point::new(row_range.end, 0);
3116 let range = (start..end).to_offset(&self.text);
3117 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3118 Some(&grammar.indents_config.as_ref()?.query)
3119 });
3120 let indent_configs = matches
3121 .grammars()
3122 .iter()
3123 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3124 .collect::<Vec<_>>();
3125
3126 let mut indent_ranges = Vec::<Range<Point>>::new();
3127 let mut start_positions = Vec::<StartPosition>::new();
3128 let mut outdent_positions = Vec::<Point>::new();
3129 while let Some(mat) = matches.peek() {
3130 let mut start: Option<Point> = None;
3131 let mut end: Option<Point> = None;
3132
3133 let config = indent_configs[mat.grammar_index];
3134 for capture in mat.captures {
3135 if capture.index == config.indent_capture_ix {
3136 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3137 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3138 } else if Some(capture.index) == config.start_capture_ix {
3139 start = Some(Point::from_ts_point(capture.node.end_position()));
3140 } else if Some(capture.index) == config.end_capture_ix {
3141 end = Some(Point::from_ts_point(capture.node.start_position()));
3142 } else if Some(capture.index) == config.outdent_capture_ix {
3143 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3144 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3145 start_positions.push(StartPosition {
3146 start: Point::from_ts_point(capture.node.start_position()),
3147 suffix: suffix.clone(),
3148 });
3149 }
3150 }
3151
3152 matches.advance();
3153 if let Some((start, end)) = start.zip(end) {
3154 if start.row == end.row {
3155 continue;
3156 }
3157 let range = start..end;
3158 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3159 Err(ix) => indent_ranges.insert(ix, range),
3160 Ok(ix) => {
3161 let prev_range = &mut indent_ranges[ix];
3162 prev_range.end = prev_range.end.max(range.end);
3163 }
3164 }
3165 }
3166 }
3167
3168 let mut error_ranges = Vec::<Range<Point>>::new();
3169 let mut matches = self
3170 .syntax
3171 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3172 while let Some(mat) = matches.peek() {
3173 let node = mat.captures[0].node;
3174 let start = Point::from_ts_point(node.start_position());
3175 let end = Point::from_ts_point(node.end_position());
3176 let range = start..end;
3177 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3178 Ok(ix) | Err(ix) => ix,
3179 };
3180 let mut end_ix = ix;
3181 while let Some(existing_range) = error_ranges.get(end_ix) {
3182 if existing_range.end < end {
3183 end_ix += 1;
3184 } else {
3185 break;
3186 }
3187 }
3188 error_ranges.splice(ix..end_ix, [range]);
3189 matches.advance();
3190 }
3191
3192 outdent_positions.sort();
3193 for outdent_position in outdent_positions {
3194 // find the innermost indent range containing this outdent_position
3195 // set its end to the outdent position
3196 if let Some(range_to_truncate) = indent_ranges
3197 .iter_mut()
3198 .filter(|indent_range| indent_range.contains(&outdent_position))
3199 .next_back()
3200 {
3201 range_to_truncate.end = outdent_position;
3202 }
3203 }
3204
3205 start_positions.sort_by_key(|b| b.start);
3206
        // Find the suggested indentation increases and decreases based on regexes.
3208 let mut regex_outdent_map = HashMap::default();
3209 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3210 let mut start_positions_iter = start_positions.iter().peekable();
3211
3212 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3213 self.for_each_line(
3214 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3215 ..Point::new(row_range.end, 0),
3216 |row, line| {
3217 if config
3218 .decrease_indent_pattern
3219 .as_ref()
3220 .is_some_and(|regex| regex.is_match(line))
3221 {
3222 indent_change_rows.push((row, Ordering::Less));
3223 }
3224 if config
3225 .increase_indent_pattern
3226 .as_ref()
3227 .is_some_and(|regex| regex.is_match(line))
3228 {
3229 indent_change_rows.push((row + 1, Ordering::Greater));
3230 }
3231 while let Some(pos) = start_positions_iter.peek() {
3232 if pos.start.row < row {
3233 let pos = start_positions_iter.next().unwrap();
3234 last_seen_suffix
3235 .entry(pos.suffix.to_string())
3236 .or_default()
3237 .push(pos.start);
3238 } else {
3239 break;
3240 }
3241 }
3242 for rule in &config.decrease_indent_patterns {
3243 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3244 let row_start_column = self.indent_size_for_line(row).len;
3245 let basis_row = rule
3246 .valid_after
3247 .iter()
3248 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3249 .flatten()
3250 .filter(|start_point| start_point.column <= row_start_column)
3251 .max_by_key(|start_point| start_point.row);
3252 if let Some(outdent_to_row) = basis_row {
3253 regex_outdent_map.insert(row, outdent_to_row.row);
3254 }
3255 break;
3256 }
3257 }
3258 },
3259 );
3260
3261 let mut indent_changes = indent_change_rows.into_iter().peekable();
3262 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3263 prev_non_blank_row.unwrap_or(0)
3264 } else {
3265 row_range.start.saturating_sub(1)
3266 };
3267
3268 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3269 Some(row_range.map(move |row| {
3270 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3271
3272 let mut indent_from_prev_row = false;
3273 let mut outdent_from_prev_row = false;
3274 let mut outdent_to_row = u32::MAX;
3275 let mut from_regex = false;
3276
3277 while let Some((indent_row, delta)) = indent_changes.peek() {
3278 match indent_row.cmp(&row) {
3279 Ordering::Equal => match delta {
3280 Ordering::Less => {
3281 from_regex = true;
3282 outdent_from_prev_row = true
3283 }
3284 Ordering::Greater => {
3285 indent_from_prev_row = true;
3286 from_regex = true
3287 }
3288 _ => {}
3289 },
3290
3291 Ordering::Greater => break,
3292 Ordering::Less => {}
3293 }
3294
3295 indent_changes.next();
3296 }
3297
3298 for range in &indent_ranges {
3299 if range.start.row >= row {
3300 break;
3301 }
3302 if range.start.row == prev_row && range.end > row_start {
3303 indent_from_prev_row = true;
3304 }
3305 if range.end > prev_row_start && range.end <= row_start {
3306 outdent_to_row = outdent_to_row.min(range.start.row);
3307 }
3308 }
3309
3310 if let Some(basis_row) = regex_outdent_map.get(&row) {
3311 indent_from_prev_row = false;
3312 outdent_to_row = *basis_row;
3313 from_regex = true;
3314 }
3315
3316 let within_error = error_ranges
3317 .iter()
3318 .any(|e| e.start.row < row && e.end > row_start);
3319
3320 let suggestion = if outdent_to_row == prev_row
3321 || (outdent_from_prev_row && indent_from_prev_row)
3322 {
3323 Some(IndentSuggestion {
3324 basis_row: prev_row,
3325 delta: Ordering::Equal,
3326 within_error: within_error && !from_regex,
3327 })
3328 } else if indent_from_prev_row {
3329 Some(IndentSuggestion {
3330 basis_row: prev_row,
3331 delta: Ordering::Greater,
3332 within_error: within_error && !from_regex,
3333 })
3334 } else if outdent_to_row < prev_row {
3335 Some(IndentSuggestion {
3336 basis_row: outdent_to_row,
3337 delta: Ordering::Equal,
3338 within_error: within_error && !from_regex,
3339 })
3340 } else if outdent_from_prev_row {
3341 Some(IndentSuggestion {
3342 basis_row: prev_row,
3343 delta: Ordering::Less,
3344 within_error: within_error && !from_regex,
3345 })
3346 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3347 {
3348 Some(IndentSuggestion {
3349 basis_row: prev_row,
3350 delta: Ordering::Equal,
3351 within_error: within_error && !from_regex,
3352 })
3353 } else {
3354 None
3355 };
3356
3357 prev_row = row;
3358 prev_row_start = row_start;
3359 suggestion
3360 }))
3361 }
3362
3363 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3364 while row > 0 {
3365 row -= 1;
3366 if !self.is_line_blank(row) {
3367 return Some(row);
3368 }
3369 }
3370 None
3371 }
3372
3373 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3374 let captures = self.syntax.captures(range, &self.text, |grammar| {
3375 grammar
3376 .highlights_config
3377 .as_ref()
3378 .map(|config| &config.query)
3379 });
3380 let highlight_maps = captures
3381 .grammars()
3382 .iter()
3383 .map(|grammar| grammar.highlight_map())
3384 .collect();
3385 (captures, highlight_maps)
3386 }
3387
3388 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3389 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3390 /// returned in chunks where each chunk has a single syntax highlighting style and
3391 /// diagnostic status.
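    ///
    /// A sketch: iterate over the syntax-highlighted chunks of the whole buffer.
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight style and diagnostic status.
    /// }
    /// ```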
3392 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3393 let range = range.start.to_offset(self)..range.end.to_offset(self);
3394
3395 let mut syntax = None;
3396 if language_aware {
3397 syntax = Some(self.get_highlights(range.clone()));
3398 }
3399 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3400 let diagnostics = language_aware;
3401 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3402 }
3403
3404 pub fn highlighted_text_for_range<T: ToOffset>(
3405 &self,
3406 range: Range<T>,
3407 override_style: Option<HighlightStyle>,
3408 syntax_theme: &SyntaxTheme,
3409 ) -> HighlightedText {
3410 HighlightedText::from_buffer_range(
3411 range,
3412 &self.text,
3413 &self.syntax,
3414 override_style,
3415 syntax_theme,
3416 )
3417 }
3418
3419 /// Invokes the given callback for each line of text in the given range of the buffer.
3420 /// Uses callback to avoid allocating a string for each line.
3421 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3422 let mut line = String::new();
3423 let mut row = range.start.row;
3424 for chunk in self
3425 .as_rope()
3426 .chunks_in_range(range.to_offset(self))
3427 .chain(["\n"])
3428 {
3429 for (newline_ix, text) in chunk.split('\n').enumerate() {
3430 if newline_ix > 0 {
3431 callback(row, &line);
3432 row += 1;
3433 line.clear();
3434 }
3435 line.push_str(text);
3436 }
3437 }
3438 }
3439
3440 /// Iterates over every [`SyntaxLayer`] in the buffer.
3441 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3442 self.syntax_layers_for_range(0..self.len(), true)
3443 }
3444
3445 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3446 let offset = position.to_offset(self);
3447 self.syntax_layers_for_range(offset..offset, false)
3448 .filter(|l| l.node().end_byte() > offset)
3449 .last()
3450 }
3451
3452 pub fn syntax_layers_for_range<D: ToOffset>(
3453 &self,
3454 range: Range<D>,
3455 include_hidden: bool,
3456 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3457 self.syntax
3458 .layers_for_range(range, &self.text, include_hidden)
3459 }
3460
3461 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3462 &self,
3463 range: Range<D>,
3464 ) -> Option<SyntaxLayer<'_>> {
3465 let range = range.to_offset(self);
3466 self.syntax
3467 .layers_for_range(range, &self.text, false)
3468 .max_by(|a, b| {
3469 if a.depth != b.depth {
3470 a.depth.cmp(&b.depth)
3471 } else if a.offset.0 != b.offset.0 {
3472 a.offset.0.cmp(&b.offset.0)
3473 } else {
3474 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3475 }
3476 })
3477 }
3478
3479 /// Returns the main [`Language`].
3480 pub fn language(&self) -> Option<&Arc<Language>> {
3481 self.language.as_ref()
3482 }
3483
3484 /// Returns the [`Language`] at the given location.
3485 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3486 self.syntax_layer_at(position)
3487 .map(|info| info.language)
3488 .or(self.language.as_ref())
3489 }
3490
3491 /// Returns the settings for the language at the given location.
3492 pub fn settings_at<'a, D: ToOffset>(
3493 &'a self,
3494 position: D,
3495 cx: &'a App,
3496 ) -> Cow<'a, LanguageSettings> {
3497 language_settings(
3498 self.language_at(position).map(|l| l.name()),
3499 self.file.as_ref(),
3500 cx,
3501 )
3502 }
3503
3504 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3505 CharClassifier::new(self.language_scope_at(point))
3506 }
3507
3508 /// Returns the [`LanguageScope`] at the given location.
3509 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3510 let offset = position.to_offset(self);
3511 let mut scope = None;
3512 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3513
3514 // Use the layer that has the smallest node intersecting the given point.
3515 for layer in self
3516 .syntax
3517 .layers_for_range(offset..offset, &self.text, false)
3518 {
3519 let mut cursor = layer.node().walk();
3520
3521 let mut range = None;
3522 loop {
3523 let child_range = cursor.node().byte_range();
3524 if !child_range.contains(&offset) {
3525 break;
3526 }
3527
3528 range = Some(child_range);
3529 if cursor.goto_first_child_for_byte(offset).is_none() {
3530 break;
3531 }
3532 }
3533
3534 if let Some(range) = range
3535 && smallest_range_and_depth.as_ref().is_none_or(
3536 |(smallest_range, smallest_range_depth)| {
3537 if layer.depth > *smallest_range_depth {
3538 true
3539 } else if layer.depth == *smallest_range_depth {
3540 range.len() < smallest_range.len()
3541 } else {
3542 false
3543 }
3544 },
3545 )
3546 {
3547 smallest_range_and_depth = Some((range, layer.depth));
3548 scope = Some(LanguageScope {
3549 language: layer.language.clone(),
3550 override_id: layer.override_id(offset, &self.text),
3551 });
3552 }
3553 }
3554
3555 scope.or_else(|| {
3556 self.language.clone().map(|language| LanguageScope {
3557 language,
3558 override_id: None,
3559 })
3560 })
3561 }
3562
3563 /// Returns a tuple of the range and character kind of the word
3564 /// surrounding the given position.
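    ///
    /// A sketch of typical usage, where `snapshot` and `offset` are assumed:
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(word_range).collect();
    /// }
    /// ```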
3565 pub fn surrounding_word<T: ToOffset>(
3566 &self,
3567 start: T,
3568 scope_context: Option<CharScopeContext>,
3569 ) -> (Range<usize>, Option<CharKind>) {
3570 let mut start = start.to_offset(self);
3571 let mut end = start;
3572 let mut next_chars = self.chars_at(start).take(128).peekable();
3573 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3574
3575 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3576 let word_kind = cmp::max(
3577 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3578 next_chars.peek().copied().map(|c| classifier.kind(c)),
3579 );
3580
3581 for ch in prev_chars {
3582 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3583 start -= ch.len_utf8();
3584 } else {
3585 break;
3586 }
3587 }
3588
3589 for ch in next_chars {
3590 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3591 end += ch.len_utf8();
3592 } else {
3593 break;
3594 }
3595 }
3596
3597 (start..end, word_kind)
3598 }
3599
3600 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3601 /// range. When `require_larger` is true, the node found must be larger than the query range.
3602 ///
3603 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3604 /// be moved to the root of the tree.
3605 fn goto_node_enclosing_range(
3606 cursor: &mut tree_sitter::TreeCursor,
3607 query_range: &Range<usize>,
3608 require_larger: bool,
3609 ) -> bool {
3610 let mut ascending = false;
3611 loop {
3612 let mut range = cursor.node().byte_range();
3613 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3616 if range.start > query_range.start {
3617 cursor.goto_previous_sibling();
3618 range = cursor.node().byte_range();
3619 }
3620 } else {
3621 // When the query range is non-empty and the current node ends exactly at the start,
3622 // move to the next sibling to find a node that extends beyond the start.
3623 if range.end == query_range.start {
3624 cursor.goto_next_sibling();
3625 range = cursor.node().byte_range();
3626 }
3627 }
3628
3629 let encloses = range.contains_inclusive(query_range)
3630 && (!require_larger || range.len() > query_range.len());
3631 if !encloses {
3632 ascending = true;
3633 if !cursor.goto_parent() {
3634 return false;
3635 }
3636 continue;
3637 } else if ascending {
3638 return true;
3639 }
3640
3641 // Descend into the current node.
3642 if cursor
3643 .goto_first_child_for_byte(query_range.start)
3644 .is_none()
3645 {
3646 return true;
3647 }
3648 }
3649 }
3650
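    /// Returns the smallest syntax node that contains the given range and is strictly larger
    /// than it, considering every syntax layer. For an empty range sitting between two nodes,
    /// the node to the right is preferred unless only the left one is named.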
3651 pub fn syntax_ancestor<'a, T: ToOffset>(
3652 &'a self,
3653 range: Range<T>,
3654 ) -> Option<tree_sitter::Node<'a>> {
3655 let range = range.start.to_offset(self)..range.end.to_offset(self);
3656 let mut result: Option<tree_sitter::Node<'a>> = None;
3657 for layer in self
3658 .syntax
3659 .layers_for_range(range.clone(), &self.text, true)
3660 {
3661 let mut cursor = layer.node().walk();
3662
3663 // Find the node that both contains the range and is larger than it.
3664 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3665 continue;
3666 }
3667
3668 let left_node = cursor.node();
3669 let mut layer_result = left_node;
3670
3671 // For an empty range, try to find another node immediately to the right of the range.
3672 if left_node.end_byte() == range.start {
3673 let mut right_node = None;
3674 while !cursor.goto_next_sibling() {
3675 if !cursor.goto_parent() {
3676 break;
3677 }
3678 }
3679
3680 while cursor.node().start_byte() == range.start {
3681 right_node = Some(cursor.node());
3682 if !cursor.goto_first_child() {
3683 break;
3684 }
3685 }
3686
3687 // If there is a candidate node on both sides of the (empty) range, then
3688 // decide between the two by favoring a named node over an anonymous token.
3689 // If both nodes are the same in that regard, favor the right one.
3690 if let Some(right_node) = right_node
3691 && (right_node.is_named() || !left_node.is_named())
3692 {
3693 layer_result = right_node;
3694 }
3695 }
3696
3697 if let Some(previous_result) = &result
3698 && previous_result.byte_range().len() < layer_result.byte_range().len()
3699 {
3700 continue;
3701 }
3702 result = Some(layer_result);
3703 }
3704
3705 result
3706 }
3707
3708 /// Find the previous sibling syntax node at the given range.
3709 ///
3710 /// This function locates the syntax node that precedes the node containing
3711 /// the given range. It searches hierarchically by:
3712 /// 1. Finding the node that contains the given range
3713 /// 2. Looking for the previous sibling at the same tree level
3714 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3715 ///
3716 /// Returns `None` if there is no previous sibling at any ancestor level.
3717 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3718 &'a self,
3719 range: Range<T>,
3720 ) -> Option<tree_sitter::Node<'a>> {
3721 let range = range.start.to_offset(self)..range.end.to_offset(self);
3722 let mut result: Option<tree_sitter::Node<'a>> = None;
3723
3724 for layer in self
3725 .syntax
3726 .layers_for_range(range.clone(), &self.text, true)
3727 {
3728 let mut cursor = layer.node().walk();
3729
3730 // Find the node that contains the range
3731 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3732 continue;
3733 }
3734
3735 // Look for the previous sibling, moving up ancestor levels if needed
3736 loop {
3737 if cursor.goto_previous_sibling() {
3738 let layer_result = cursor.node();
3739
3740 if let Some(previous_result) = &result {
3741 if previous_result.byte_range().end < layer_result.byte_range().end {
3742 continue;
3743 }
3744 }
3745 result = Some(layer_result);
3746 break;
3747 }
3748
3749 // No sibling found at this level, try moving up to parent
3750 if !cursor.goto_parent() {
3751 break;
3752 }
3753 }
3754 }
3755
3756 result
3757 }
3758
3759 /// Find the next sibling syntax node at the given range.
3760 ///
3761 /// This function locates the syntax node that follows the node containing
3762 /// the given range. It searches hierarchically by:
3763 /// 1. Finding the node that contains the given range
3764 /// 2. Looking for the next sibling at the same tree level
3765 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3766 ///
3767 /// Returns `None` if there is no next sibling at any ancestor level.
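    ///
    /// A sketch of stepping to the following node, where `snapshot` and `range` are assumed:
    ///
    /// ```ignore
    /// if let Some(next) = snapshot.syntax_next_sibling(range.clone()) {
    ///     let next_range = next.byte_range();
    /// }
    /// ```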
3768 pub fn syntax_next_sibling<'a, T: ToOffset>(
3769 &'a self,
3770 range: Range<T>,
3771 ) -> Option<tree_sitter::Node<'a>> {
3772 let range = range.start.to_offset(self)..range.end.to_offset(self);
3773 let mut result: Option<tree_sitter::Node<'a>> = None;
3774
3775 for layer in self
3776 .syntax
3777 .layers_for_range(range.clone(), &self.text, true)
3778 {
3779 let mut cursor = layer.node().walk();
3780
3781 // Find the node that contains the range
3782 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3783 continue;
3784 }
3785
3786 // Look for the next sibling, moving up ancestor levels if needed
3787 loop {
3788 if cursor.goto_next_sibling() {
3789 let layer_result = cursor.node();
3790
3791 if let Some(previous_result) = &result {
3792 if previous_result.byte_range().start > layer_result.byte_range().start {
3793 continue;
3794 }
3795 }
3796 result = Some(layer_result);
3797 break;
3798 }
3799
3800 // No sibling found at this level, try moving up to parent
3801 if !cursor.goto_parent() {
3802 break;
3803 }
3804 }
3805 }
3806
3807 result
3808 }
3809
3810 /// Returns the root syntax node within the given row
3811 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3812 let start_offset = position.to_offset(self);
3813
3814 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3815
3816 let layer = self
3817 .syntax
3818 .layers_for_range(start_offset..start_offset, &self.text, true)
3819 .next()?;
3820
3821 let mut cursor = layer.node().walk();
3822
3823 // Descend to the first leaf that touches the start of the range.
3824 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3825 if cursor.node().end_byte() == start_offset {
3826 cursor.goto_next_sibling();
3827 }
3828 }
3829
3830 // Ascend to the root node within the same row.
3831 while cursor.goto_parent() {
3832 if cursor.node().start_position().row != row {
3833 break;
3834 }
3835 }
3836
3837 Some(cursor.node())
3838 }
3839
3840 /// Returns the outline for the buffer.
3841 ///
3842 /// This method allows passing an optional [`SyntaxTheme`] to
3843 /// syntax-highlight the returned symbols.
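    ///
    /// A sketch of walking the raw outline items that this method wraps, with no theme
    /// so the items are unstyled:
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```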
3844 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3845 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3846 }
3847
3848 /// Returns all the symbols that contain the given position.
3849 ///
3850 /// This method allows passing an optional [`SyntaxTheme`] to
3851 /// syntax-highlight the returned symbols.
3852 pub fn symbols_containing<T: ToOffset>(
3853 &self,
3854 position: T,
3855 theme: Option<&SyntaxTheme>,
3856 ) -> Vec<OutlineItem<Anchor>> {
3857 let position = position.to_offset(self);
3858 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3859 let end = self.clip_offset(position + 1, Bias::Right);
3860 let mut items = self.outline_items_containing(start..end, false, theme);
3861 let mut prev_depth = None;
3862 items.retain(|item| {
3863 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3864 prev_depth = Some(item.depth);
3865 result
3866 });
3867 items
3868 }
3869
3870 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3871 let range = range.to_offset(self);
3872 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3873 grammar.outline_config.as_ref().map(|c| &c.query)
3874 });
3875 let configs = matches
3876 .grammars()
3877 .iter()
3878 .map(|g| g.outline_config.as_ref().unwrap())
3879 .collect::<Vec<_>>();
3880
3881 while let Some(mat) = matches.peek() {
3882 let config = &configs[mat.grammar_index];
3883 let containing_item_node = maybe!({
3884 let item_node = mat.captures.iter().find_map(|cap| {
3885 if cap.index == config.item_capture_ix {
3886 Some(cap.node)
3887 } else {
3888 None
3889 }
3890 })?;
3891
3892 let item_byte_range = item_node.byte_range();
3893 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3894 None
3895 } else {
3896 Some(item_node)
3897 }
3898 });
3899
3900 if let Some(item_node) = containing_item_node {
3901 return Some(
3902 Point::from_ts_point(item_node.start_position())
3903 ..Point::from_ts_point(item_node.end_position()),
3904 );
3905 }
3906
3907 matches.advance();
3908 }
3909 None
3910 }
3911
3912 pub fn outline_items_containing<T: ToOffset>(
3913 &self,
3914 range: Range<T>,
3915 include_extra_context: bool,
3916 theme: Option<&SyntaxTheme>,
3917 ) -> Vec<OutlineItem<Anchor>> {
3918 self.outline_items_containing_internal(
3919 range,
3920 include_extra_context,
3921 theme,
3922 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3923 )
3924 }
3925
3926 pub fn outline_items_as_points_containing<T: ToOffset>(
3927 &self,
3928 range: Range<T>,
3929 include_extra_context: bool,
3930 theme: Option<&SyntaxTheme>,
3931 ) -> Vec<OutlineItem<Point>> {
3932 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3933 range
3934 })
3935 }
3936
3937 fn outline_items_containing_internal<T: ToOffset, U>(
3938 &self,
3939 range: Range<T>,
3940 include_extra_context: bool,
3941 theme: Option<&SyntaxTheme>,
3942 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3943 ) -> Vec<OutlineItem<U>> {
3944 let range = range.to_offset(self);
3945 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3946 grammar.outline_config.as_ref().map(|c| &c.query)
3947 });
3948
3949 let mut items = Vec::new();
3950 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3951 while let Some(mat) = matches.peek() {
3952 let config = matches.grammars()[mat.grammar_index]
3953 .outline_config
3954 .as_ref()
3955 .unwrap();
3956 if let Some(item) =
3957 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3958 {
3959 items.push(item);
3960 } else if let Some(capture) = mat
3961 .captures
3962 .iter()
3963 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3964 {
3965 let capture_range = capture.node.start_position()..capture.node.end_position();
3966 let mut capture_row_range =
3967 capture_range.start.row as u32..capture_range.end.row as u32;
3968 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3969 {
3970 capture_row_range.end -= 1;
3971 }
3972 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3973 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3974 last_row_range.end = capture_row_range.end;
3975 } else {
3976 annotation_row_ranges.push(capture_row_range);
3977 }
3978 } else {
3979 annotation_row_ranges.push(capture_row_range);
3980 }
3981 }
3982 matches.advance();
3983 }
3984
3985 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3986
3987 // Assign depths based on containment relationships and convert to anchors.
3988 let mut item_ends_stack = Vec::<Point>::new();
3989 let mut anchor_items = Vec::new();
3990 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3991 for item in items {
3992 while let Some(last_end) = item_ends_stack.last().copied() {
3993 if last_end < item.range.end {
3994 item_ends_stack.pop();
3995 } else {
3996 break;
3997 }
3998 }
3999
4000 let mut annotation_row_range = None;
4001 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4002 let row_preceding_item = item.range.start.row.saturating_sub(1);
4003 if next_annotation_row_range.end < row_preceding_item {
4004 annotation_row_ranges.next();
4005 } else {
4006 if next_annotation_row_range.end == row_preceding_item {
4007 annotation_row_range = Some(next_annotation_row_range.clone());
4008 annotation_row_ranges.next();
4009 }
4010 break;
4011 }
4012 }
4013
4014 anchor_items.push(OutlineItem {
4015 depth: item_ends_stack.len(),
4016 range: range_callback(self, item.range.clone()),
4017 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4018 text: item.text,
4019 highlight_ranges: item.highlight_ranges,
4020 name_ranges: item.name_ranges,
4021 body_range: item.body_range.map(|r| range_callback(self, r)),
4022 annotation_range: annotation_row_range.map(|annotation_range| {
4023 let point_range = Point::new(annotation_range.start, 0)
4024 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4025 range_callback(self, point_range)
4026 }),
4027 });
4028 item_ends_stack.push(item.range.end);
4029 }
4030
4031 anchor_items
4032 }
4033
4034 fn next_outline_item(
4035 &self,
4036 config: &OutlineConfig,
4037 mat: &SyntaxMapMatch,
4038 range: &Range<usize>,
4039 include_extra_context: bool,
4040 theme: Option<&SyntaxTheme>,
4041 ) -> Option<OutlineItem<Point>> {
4042 let item_node = mat.captures.iter().find_map(|cap| {
4043 if cap.index == config.item_capture_ix {
4044 Some(cap.node)
4045 } else {
4046 None
4047 }
4048 })?;
4049
4050 let item_byte_range = item_node.byte_range();
4051 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4052 return None;
4053 }
4054 let item_point_range = Point::from_ts_point(item_node.start_position())
4055 ..Point::from_ts_point(item_node.end_position());
4056
4057 let mut open_point = None;
4058 let mut close_point = None;
4059
4060 let mut buffer_ranges = Vec::new();
4061 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4062 let mut range = node.start_byte()..node.end_byte();
4063 let start = node.start_position();
4064 if node.end_position().row > start.row {
4065 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4066 }
4067
4068 if !range.is_empty() {
4069 buffer_ranges.push((range, node_is_name));
4070 }
4071 };
4072
4073 for capture in mat.captures {
4074 if capture.index == config.name_capture_ix {
4075 add_to_buffer_ranges(capture.node, true);
4076 } else if Some(capture.index) == config.context_capture_ix
4077 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4078 {
4079 add_to_buffer_ranges(capture.node, false);
4080 } else {
4081 if Some(capture.index) == config.open_capture_ix {
4082 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4083 } else if Some(capture.index) == config.close_capture_ix {
4084 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4085 }
4086 }
4087 }
4088
4089 if buffer_ranges.is_empty() {
4090 return None;
4091 }
4092 let source_range_for_text =
4093 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4094
4095 let mut text = String::new();
4096 let mut highlight_ranges = Vec::new();
4097 let mut name_ranges = Vec::new();
4098 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4099 let mut last_buffer_range_end = 0;
4100 for (buffer_range, is_name) in buffer_ranges {
4101 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4102 if space_added {
4103 text.push(' ');
4104 }
4105 let before_append_len = text.len();
4106 let mut offset = buffer_range.start;
4107 chunks.seek(buffer_range.clone());
4108 for mut chunk in chunks.by_ref() {
4109 if chunk.text.len() > buffer_range.end - offset {
4110 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4111 offset = buffer_range.end;
4112 } else {
4113 offset += chunk.text.len();
4114 }
4115 let style = chunk
4116 .syntax_highlight_id
4117 .zip(theme)
4118 .and_then(|(highlight, theme)| highlight.style(theme));
4119 if let Some(style) = style {
4120 let start = text.len();
4121 let end = start + chunk.text.len();
4122 highlight_ranges.push((start..end, style));
4123 }
4124 text.push_str(chunk.text);
4125 if offset >= buffer_range.end {
4126 break;
4127 }
4128 }
4129 if is_name {
4130 let after_append_len = text.len();
4131 let start = if space_added && !name_ranges.is_empty() {
4132 before_append_len - 1
4133 } else {
4134 before_append_len
4135 };
4136 name_ranges.push(start..after_append_len);
4137 }
4138 last_buffer_range_end = buffer_range.end;
4139 }
4140
4141 Some(OutlineItem {
4142 depth: 0, // We'll calculate the depth later
4143 range: item_point_range,
4144 source_range_for_text: source_range_for_text.to_point(self),
4145 text,
4146 highlight_ranges,
4147 name_ranges,
4148 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4149 annotation_range: None,
4150 })
4151 }
4152
4153 pub fn function_body_fold_ranges<T: ToOffset>(
4154 &self,
4155 within: Range<T>,
4156 ) -> impl Iterator<Item = Range<usize>> + '_ {
4157 self.text_object_ranges(within, TreeSitterOptions::default())
4158 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4159 }
4160
4161 /// For each grammar in the language, runs the provided
4162 /// [`tree_sitter::Query`] against the given range.
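    ///
    /// For example, a sketch of running each grammar's outline query over the whole buffer:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```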
4163 pub fn matches(
4164 &self,
4165 range: Range<usize>,
4166 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4167 ) -> SyntaxMapMatches<'_> {
4168 self.syntax.matches(range, self, query)
4169 }
4170
    /// Returns an unordered collection of bracket matches overlapping the given range.
    /// Matches are cached per chunk of the buffer and recomputed when the buffer changes.
4172 fn fetch_bracket_ranges(&self, range: Range<usize>) -> Vec<BracketMatch> {
4173 let mut tree_sitter_data = self.tree_sitter_data.write();
4174 if self
4175 .version
4176 .changed_since(&tree_sitter_data.data_for_version)
4177 {
4178 *tree_sitter_data = TreeSitterData::from_buffer_range(
4179 (0..self.len()).to_point(self),
4180 self.version().clone(),
4181 );
4182 }
4183
4184 let point_range = range.to_point(self);
4185 let row_range = point_range.start.row..=point_range.end.row;
4186 let applicable_chunks = tree_sitter_data
4187 .chunks
4188 .iter()
4189 .filter(move |buffer_chunk| {
4190 let chunk_row_range = buffer_chunk.start..buffer_chunk.end;
4191 chunk_row_range.contains(&row_range.start())
4192 || chunk_row_range.contains(&row_range.end())
4193 })
4194 .copied()
4195 .collect::<Vec<_>>();
4196
4197 let mut all_bracket_matches = Vec::new();
4198 for chunk in applicable_chunks {
4199 let chunk_brackets = &mut tree_sitter_data.brackets_by_chunks[chunk.id];
4200 let bracket_matches = match chunk_brackets {
4201 Some(cached_brackets) => cached_brackets.clone(),
4202 None => {
4203 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4204 grammar.brackets_config.as_ref().map(|c| &c.query)
4205 });
4206 let configs = matches
4207 .grammars()
4208 .iter()
4209 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4210 .collect::<Vec<_>>();
4211
4212 // todo!
4213 let mut depth = 0;
4214 let range = range.clone();
4215 let new_matches = iter::from_fn(move || {
4216 while let Some(mat) = matches.peek() {
4217 let mut open = None;
4218 let mut close = None;
4219 let config = configs[mat.grammar_index];
4220 let pattern = &config.patterns[mat.pattern_index];
4221 for capture in mat.captures {
4222 if capture.index == config.open_capture_ix {
4223 open = Some(capture.node.byte_range());
4224 } else if capture.index == config.close_capture_ix {
4225 close = Some(capture.node.byte_range());
4226 }
4227 }
4228
4229 matches.advance();
4230
4231 let Some((open_range, close_range)) = open.zip(close) else {
4232 continue;
4233 };
4234
4235 let bracket_range = open_range.start..=close_range.end;
4236 if !bracket_range.overlaps(&range) {
4237 continue;
4238 }
4239
4240 depth += 1;
4241
4242 return Some(BracketMatch {
4243 open_range,
4244 close_range,
4245 newline_only: pattern.newline_only,
4246 depth,
4247 });
4248 }
4249 None
4250 })
4251 .collect::<Vec<_>>();
4252 *chunk_brackets = Some(new_matches.clone());
4253 new_matches
4254 }
4255 };
4256 all_bracket_matches.extend(bracket_matches);
4257 }
4258
4259 all_bracket_matches
4260 }
4261
4262 pub fn all_bracket_ranges(&self, range: Range<usize>) -> Vec<BracketMatch> {
4263 self.fetch_bracket_ranges(range)
4264 }
4265
4266 /// Returns bracket range pairs overlapping or adjacent to `range`
4267 pub fn bracket_ranges<T: ToOffset>(
4268 &self,
4269 range: Range<T>,
4270 ) -> impl Iterator<Item = BracketMatch> + '_ {
4271 // Find bracket pairs that *inclusively* contain the given range.
4272 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4273 self.all_bracket_ranges(range)
4274 .into_iter()
4275 .filter(|pair| !pair.newline_only)
4276 }
4277
4278 pub fn debug_variables_query<T: ToOffset>(
4279 &self,
4280 range: Range<T>,
4281 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4282 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4283
4284 let mut matches = self.syntax.matches_with_options(
4285 range.clone(),
4286 &self.text,
4287 TreeSitterOptions::default(),
4288 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4289 );
4290
4291 let configs = matches
4292 .grammars()
4293 .iter()
4294 .map(|grammar| grammar.debug_variables_config.as_ref())
4295 .collect::<Vec<_>>();
4296
4297 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4298
4299 iter::from_fn(move || {
4300 loop {
4301 while let Some(capture) = captures.pop() {
4302 if capture.0.overlaps(&range) {
4303 return Some(capture);
4304 }
4305 }
4306
4307 let mat = matches.peek()?;
4308
4309 let Some(config) = configs[mat.grammar_index].as_ref() else {
4310 matches.advance();
4311 continue;
4312 };
4313
4314 for capture in mat.captures {
4315 let Some(ix) = config
4316 .objects_by_capture_ix
4317 .binary_search_by_key(&capture.index, |e| e.0)
4318 .ok()
4319 else {
4320 continue;
4321 };
4322 let text_object = config.objects_by_capture_ix[ix].1;
4323 let byte_range = capture.node.byte_range();
4324
4325 let mut found = false;
4326 for (range, existing) in captures.iter_mut() {
4327 if existing == &text_object {
4328 range.start = range.start.min(byte_range.start);
4329 range.end = range.end.max(byte_range.end);
4330 found = true;
4331 break;
4332 }
4333 }
4334
4335 if !found {
4336 captures.push((byte_range, text_object));
4337 }
4338 }
4339
4340 matches.advance();
4341 }
4342 })
4343 }
4344
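    /// Returns the ranges of text objects (such as [`TextObject::InsideFunction`]) that
    /// overlap the given range, as defined by each grammar's text-object query.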
4345 pub fn text_object_ranges<T: ToOffset>(
4346 &self,
4347 range: Range<T>,
4348 options: TreeSitterOptions,
4349 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4350 let range =
4351 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4352
4353 let mut matches =
4354 self.syntax
4355 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4356 grammar.text_object_config.as_ref().map(|c| &c.query)
4357 });
4358
4359 let configs = matches
4360 .grammars()
4361 .iter()
4362 .map(|grammar| grammar.text_object_config.as_ref())
4363 .collect::<Vec<_>>();
4364
4365 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4366
4367 iter::from_fn(move || {
4368 loop {
4369 while let Some(capture) = captures.pop() {
4370 if capture.0.overlaps(&range) {
4371 return Some(capture);
4372 }
4373 }
4374
4375 let mat = matches.peek()?;
4376
4377 let Some(config) = configs[mat.grammar_index].as_ref() else {
4378 matches.advance();
4379 continue;
4380 };
4381
4382 for capture in mat.captures {
4383 let Some(ix) = config
4384 .text_objects_by_capture_ix
4385 .binary_search_by_key(&capture.index, |e| e.0)
4386 .ok()
4387 else {
4388 continue;
4389 };
4390 let text_object = config.text_objects_by_capture_ix[ix].1;
4391 let byte_range = capture.node.byte_range();
4392
4393 let mut found = false;
4394 for (range, existing) in captures.iter_mut() {
4395 if existing == &text_object {
4396 range.start = range.start.min(byte_range.start);
4397 range.end = range.end.max(byte_range.end);
4398 found = true;
4399 break;
4400 }
4401 }
4402
4403 if !found {
4404 captures.push((byte_range, text_object));
4405 }
4406 }
4407
4408 matches.advance();
4409 }
4410 })
4411 }
4412
4413 /// Returns enclosing bracket ranges containing the given range
4414 pub fn enclosing_bracket_ranges<T: ToOffset>(
4415 &self,
4416 range: Range<T>,
4417 ) -> impl Iterator<Item = BracketMatch> + '_ {
4418 let range = range.start.to_offset(self)..range.end.to_offset(self);
4419
4420 self.bracket_ranges(range.clone()).filter(move |pair| {
4421 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4422 })
4423 }
4424
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
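    ///
    /// A sketch of finding the innermost brackets around an assumed `cursor` offset:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(cursor..cursor, None) {
    ///     // `open` and `close` are the byte ranges of the bracket tokens themselves.
    /// }
    /// ```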
4428 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4429 &self,
4430 range: Range<T>,
4431 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4432 ) -> Option<(Range<usize>, Range<usize>)> {
4433 let range = range.start.to_offset(self)..range.end.to_offset(self);
4434
4435 // Get the ranges of the innermost pair of brackets.
4436 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4437
4438 for pair in self.enclosing_bracket_ranges(range) {
4439 if let Some(range_filter) = range_filter
4440 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4441 {
4442 continue;
4443 }
4444
4445 let len = pair.close_range.end - pair.open_range.start;
4446
4447 if let Some((existing_open, existing_close)) = &result {
4448 let existing_len = existing_close.end - existing_open.start;
4449 if len > existing_len {
4450 continue;
4451 }
4452 }
4453
4454 result = Some((pair.open_range, pair.close_range));
4455 }
4456
4457 result
4458 }
4459
4460 /// Returns anchor ranges for any matches of the redaction query.
4461 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4462 /// will be run on the relevant section of the buffer.
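    ///
    /// A sketch of collecting every redacted span in the buffer:
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```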
4463 pub fn redacted_ranges<T: ToOffset>(
4464 &self,
4465 range: Range<T>,
4466 ) -> impl Iterator<Item = Range<usize>> + '_ {
4467 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4468 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4469 grammar
4470 .redactions_config
4471 .as_ref()
4472 .map(|config| &config.query)
4473 });
4474
4475 let configs = syntax_matches
4476 .grammars()
4477 .iter()
4478 .map(|grammar| grammar.redactions_config.as_ref())
4479 .collect::<Vec<_>>();
4480
4481 iter::from_fn(move || {
4482 let redacted_range = syntax_matches
4483 .peek()
4484 .and_then(|mat| {
4485 configs[mat.grammar_index].and_then(|config| {
4486 mat.captures
4487 .iter()
4488 .find(|capture| capture.index == config.redaction_capture_ix)
4489 })
4490 })
4491 .map(|mat| mat.node.byte_range());
4492 syntax_matches.advance();
4493 redacted_range
4494 })
4495 }
4496
4497 pub fn injections_intersecting_range<T: ToOffset>(
4498 &self,
4499 range: Range<T>,
4500 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4501 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4502
4503 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4504 grammar
4505 .injection_config
4506 .as_ref()
4507 .map(|config| &config.query)
4508 });
4509
4510 let configs = syntax_matches
4511 .grammars()
4512 .iter()
4513 .map(|grammar| grammar.injection_config.as_ref())
4514 .collect::<Vec<_>>();
4515
4516 iter::from_fn(move || {
4517 let ranges = syntax_matches.peek().and_then(|mat| {
4518 let config = &configs[mat.grammar_index]?;
4519 let content_capture_range = mat.captures.iter().find_map(|capture| {
4520 if capture.index == config.content_capture_ix {
4521 Some(capture.node.byte_range())
4522 } else {
4523 None
4524 }
4525 })?;
4526 let language = self.language_at(content_capture_range.start)?;
4527 Some((content_capture_range, language))
4528 });
4529 syntax_matches.advance();
4530 ranges
4531 })
4532 }
4533
4534 pub fn runnable_ranges(
4535 &self,
4536 offset_range: Range<usize>,
4537 ) -> impl Iterator<Item = RunnableRange> + '_ {
4538 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4539 grammar.runnable_config.as_ref().map(|config| &config.query)
4540 });
4541
4542 let test_configs = syntax_matches
4543 .grammars()
4544 .iter()
4545 .map(|grammar| grammar.runnable_config.as_ref())
4546 .collect::<Vec<_>>();
4547
4548 iter::from_fn(move || {
4549 loop {
4550 let mat = syntax_matches.peek()?;
4551
4552 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4553 let mut run_range = None;
4554 let full_range = mat.captures.iter().fold(
4555 Range {
4556 start: usize::MAX,
4557 end: 0,
4558 },
4559 |mut acc, next| {
4560 let byte_range = next.node.byte_range();
4561 if acc.start > byte_range.start {
4562 acc.start = byte_range.start;
4563 }
4564 if acc.end < byte_range.end {
4565 acc.end = byte_range.end;
4566 }
4567 acc
4568 },
4569 );
4570 if full_range.start > full_range.end {
4571 // We did not find a full spanning range of this match.
4572 return None;
4573 }
4574 let extra_captures: SmallVec<[_; 1]> =
4575 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4576 test_configs
4577 .extra_captures
4578 .get(capture.index as usize)
4579 .cloned()
4580 .and_then(|tag_name| match tag_name {
4581 RunnableCapture::Named(name) => {
4582 Some((capture.node.byte_range(), name))
4583 }
4584 RunnableCapture::Run => {
4585 let _ = run_range.insert(capture.node.byte_range());
4586 None
4587 }
4588 })
4589 }));
4590 let run_range = run_range?;
4591 let tags = test_configs
4592 .query
4593 .property_settings(mat.pattern_index)
4594 .iter()
4595 .filter_map(|property| {
4596 if *property.key == *"tag" {
4597 property
4598 .value
4599 .as_ref()
4600 .map(|value| RunnableTag(value.to_string().into()))
4601 } else {
4602 None
4603 }
4604 })
4605 .collect();
4606 let extra_captures = extra_captures
4607 .into_iter()
4608 .map(|(range, name)| {
4609 (
4610 name.to_string(),
4611 self.text_for_range(range).collect::<String>(),
4612 )
4613 })
4614 .collect();
4615 // All tags should have the same range.
4616 Some(RunnableRange {
4617 run_range,
4618 full_range,
4619 runnable: Runnable {
4620 tags,
4621 language: mat.language,
4622 buffer: self.remote_id(),
4623 },
4624 extra_captures,
4625 buffer_id: self.remote_id(),
4626 })
4627 });
4628
4629 syntax_matches.advance();
4630 if test_range.is_some() {
                // Returning here is fine: `.peek()?` above ends this iterator only once the
                // matches are exhausted. When a match lacks a run marker (`test_range` is
                // `None`), we don't end the iterator; we just loop around to the next match.
4633 return test_range;
4634 }
4635 }
4636 })
4637 }
4638
4639 /// Returns selections for remote peers intersecting the given range.
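    ///
    /// A sketch of iterating every peer's selections over the whole buffer:
    ///
    /// ```ignore
    /// let full_range = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(full_range, true)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are `Anchor`s.
    ///     }
    /// }
    /// ```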
4640 #[allow(clippy::type_complexity)]
4641 pub fn selections_in_range(
4642 &self,
4643 range: Range<Anchor>,
4644 include_local: bool,
4645 ) -> impl Iterator<
4646 Item = (
4647 ReplicaId,
4648 bool,
4649 CursorShape,
4650 impl Iterator<Item = &Selection<Anchor>> + '_,
4651 ),
4652 > + '_ {
4653 self.remote_selections
4654 .iter()
4655 .filter(move |(replica_id, set)| {
4656 (include_local || **replica_id != self.text.replica_id())
4657 && !set.selections.is_empty()
4658 })
4659 .map(move |(replica_id, set)| {
4660 let start_ix = match set.selections.binary_search_by(|probe| {
4661 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4662 }) {
4663 Ok(ix) | Err(ix) => ix,
4664 };
4665 let end_ix = match set.selections.binary_search_by(|probe| {
4666 probe.start.cmp(&range.end, self).then(Ordering::Less)
4667 }) {
4668 Ok(ix) | Err(ix) => ix,
4669 };
4670
4671 (
4672 *replica_id,
4673 set.line_mode,
4674 set.cursor_shape,
4675 set.selections[start_ix..end_ix].iter(),
4676 )
4677 })
4678 }
4679
    /// Returns whether the buffer contains any diagnostics.
4681 pub fn has_diagnostics(&self) -> bool {
4682 !self.diagnostics.is_empty()
4683 }
4684
4685 /// Returns all the diagnostics intersecting the given range.
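    ///
    /// A sketch of listing diagnostics as offset ranges, in forward order:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     let severity = entry.diagnostic.severity;
    ///     let offsets = entry.range; // Range<usize>
    /// }
    /// ```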
4686 pub fn diagnostics_in_range<'a, T, O>(
4687 &'a self,
4688 search_range: Range<T>,
4689 reversed: bool,
4690 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4691 where
4692 T: 'a + Clone + ToOffset,
4693 O: 'a + FromAnchor,
4694 {
4695 let mut iterators: Vec<_> = self
4696 .diagnostics
4697 .iter()
4698 .map(|(_, collection)| {
4699 collection
4700 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4701 .peekable()
4702 })
4703 .collect();
4704
4705 std::iter::from_fn(move || {
4706 let (next_ix, _) = iterators
4707 .iter_mut()
4708 .enumerate()
4709 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4710 .min_by(|(_, a), (_, b)| {
4711 let cmp = a
4712 .range
4713 .start
4714 .cmp(&b.range.start, self)
4715 // when range is equal, sort by diagnostic severity
4716 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4717 // and stabilize order with group_id
4718 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4719 if reversed { cmp.reverse() } else { cmp }
4720 })?;
4721 iterators[next_ix]
4722 .next()
4723 .map(
4724 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4725 diagnostic,
4726 range: FromAnchor::from_anchor(&range.start, self)
4727 ..FromAnchor::from_anchor(&range.end, self),
4728 },
4729 )
4730 })
4731 }
4732
4733 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4734 /// should be used instead.
4735 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4736 &self.diagnostics
4737 }
4738
4739 /// Returns all the diagnostic groups associated with the given
4740 /// language server ID. If no language server ID is provided,
4741 /// all diagnostics groups are returned.
4742 pub fn diagnostic_groups(
4743 &self,
4744 language_server_id: Option<LanguageServerId>,
4745 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4746 let mut groups = Vec::new();
4747
4748 if let Some(language_server_id) = language_server_id {
4749 if let Ok(ix) = self
4750 .diagnostics
4751 .binary_search_by_key(&language_server_id, |e| e.0)
4752 {
4753 self.diagnostics[ix]
4754 .1
4755 .groups(language_server_id, &mut groups, self);
4756 }
4757 } else {
4758 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4759 diagnostics.groups(*language_server_id, &mut groups, self);
4760 }
4761 }
4762
4763 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4764 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4765 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4766 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4767 });
4768
4769 groups
4770 }
4771
4772 /// Returns an iterator over the diagnostics for the given group.
4773 pub fn diagnostic_group<O>(
4774 &self,
4775 group_id: usize,
4776 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4777 where
4778 O: FromAnchor + 'static,
4779 {
4780 self.diagnostics
4781 .iter()
4782 .flat_map(move |(_, set)| set.group(group_id, self))
4783 }
4784
4785 /// An integer version number that accounts for all updates besides
4786 /// the buffer's text itself (which is versioned via a version vector).
4787 pub fn non_text_state_update_count(&self) -> usize {
4788 self.non_text_state_update_count
4789 }
4790
4791 /// An integer version that changes when the buffer's syntax changes.
4792 pub fn syntax_update_count(&self) -> usize {
4793 self.syntax.update_count()
4794 }
4795
4796 /// Returns a snapshot of underlying file.
4797 pub fn file(&self) -> Option<&Arc<dyn File>> {
4798 self.file.as_ref()
4799 }
4800
4801 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4802 if let Some(file) = self.file() {
4803 if file.path().file_name().is_none() || include_root {
4804 Some(file.full_path(cx).to_string_lossy().into_owned())
4805 } else {
4806 Some(file.path().display(file.path_style(cx)).to_string())
4807 }
4808 } else {
4809 None
4810 }
4811 }
4812
4813 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4814 let query_str = query.fuzzy_contents;
4815 if query_str.is_some_and(|query| query.is_empty()) {
4816 return BTreeMap::default();
4817 }
4818
4819 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4820 language,
4821 override_id: None,
4822 }));
4823
4824 let mut query_ix = 0;
4825 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4826 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4827
4828 let mut words = BTreeMap::default();
4829 let mut current_word_start_ix = None;
4830 let mut chunk_ix = query.range.start;
4831 for chunk in self.chunks(query.range, false) {
4832 for (i, c) in chunk.text.char_indices() {
4833 let ix = chunk_ix + i;
4834 if classifier.is_word(c) {
4835 if current_word_start_ix.is_none() {
4836 current_word_start_ix = Some(ix);
4837 }
4838
4839 if let Some(query_chars) = &query_chars
4840 && query_ix < query_len
4841 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4842 {
4843 query_ix += 1;
4844 }
4845 continue;
4846 } else if let Some(word_start) = current_word_start_ix.take()
4847 && query_ix == query_len
4848 {
4849 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4850 let mut word_text = self.text_for_range(word_start..ix).peekable();
4851 let first_char = word_text
4852 .peek()
4853 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip words
                        // that start with a digit.
4855 if !query.skip_digits
4856 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4857 {
4858 words.insert(word_text.collect(), word_range);
4859 }
4860 }
4861 query_ix = 0;
4862 }
4863 chunk_ix += chunk.text.len();
4864 }
4865
4866 words
4867 }
4868}
4869
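/// A query describing which words to collect from a [`BufferSnapshot`] via
/// [`BufferSnapshot::words_in_range`].
///
/// A sketch of collecting completion-candidate words; the field values are illustrative:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, anchor_range) in words {
///     // Each `word` contains the characters of "cfg", matched case-insensitively.
/// }
/// ```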
4870pub struct WordsQuery<'a> {
    /// Only returns words that contain every character of this fuzzy string.
4872 pub fuzzy_contents: Option<&'a str>,
4873 /// Skips words that start with a digit.
4874 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4876 pub range: Range<usize>,
4877}
4878
4879fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4880 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4881}
4882
4883fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4884 let mut result = IndentSize::spaces(0);
4885 for c in text {
4886 let kind = match c {
4887 ' ' => IndentKind::Space,
4888 '\t' => IndentKind::Tab,
4889 _ => break,
4890 };
4891 if result.len == 0 {
4892 result.kind = kind;
4893 }
4894 result.len += 1;
4895 }
4896 result
4897}
4898
4899impl Clone for BufferSnapshot {
4900 fn clone(&self) -> Self {
4901 Self {
4902 text: self.text.clone(),
4903 syntax: self.syntax.clone(),
4904 file: self.file.clone(),
4905 remote_selections: self.remote_selections.clone(),
4906 diagnostics: self.diagnostics.clone(),
4907 language: self.language.clone(),
4908 tree_sitter_data: self.tree_sitter_data.clone(),
4909 non_text_state_update_count: self.non_text_state_update_count,
4910 }
4911 }
4912}
4913
4914impl Deref for BufferSnapshot {
4915 type Target = text::BufferSnapshot;
4916
4917 fn deref(&self) -> &Self::Target {
4918 &self.text
4919 }
4920}
4921
4922unsafe impl Send for BufferChunks<'_> {}
4923
4924impl<'a> BufferChunks<'a> {
4925 pub(crate) fn new(
4926 text: &'a Rope,
4927 range: Range<usize>,
4928 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4929 diagnostics: bool,
4930 buffer_snapshot: Option<&'a BufferSnapshot>,
4931 ) -> Self {
4932 let mut highlights = None;
4933 if let Some((captures, highlight_maps)) = syntax {
4934 highlights = Some(BufferChunkHighlights {
4935 captures,
4936 next_capture: None,
4937 stack: Default::default(),
4938 highlight_maps,
4939 })
4940 }
4941
4942 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4943 let chunks = text.chunks_in_range(range.clone());
4944
4945 let mut this = BufferChunks {
4946 range,
4947 buffer_snapshot,
4948 chunks,
4949 diagnostic_endpoints,
4950 error_depth: 0,
4951 warning_depth: 0,
4952 information_depth: 0,
4953 hint_depth: 0,
4954 unnecessary_depth: 0,
4955 underline: true,
4956 highlights,
4957 };
4958 this.initialize_diagnostic_endpoints();
4959 this
4960 }
4961
    /// Seeks to the given byte range in the buffer.
4963 pub fn seek(&mut self, range: Range<usize>) {
4964 let old_range = std::mem::replace(&mut self.range, range.clone());
4965 self.chunks.set_range(self.range.clone());
4966 if let Some(highlights) = self.highlights.as_mut() {
4967 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4968 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4969 highlights
4970 .stack
4971 .retain(|(end_offset, _)| *end_offset > range.start);
4972 if let Some(capture) = &highlights.next_capture
4973 && range.start >= capture.node.start_byte()
4974 {
4975 let next_capture_end = capture.node.end_byte();
4976 if range.start < next_capture_end {
4977 highlights.stack.push((
4978 next_capture_end,
4979 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4980 ));
4981 }
4982 highlights.next_capture.take();
4983 }
4984 } else if let Some(snapshot) = self.buffer_snapshot {
4985 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4986 *highlights = BufferChunkHighlights {
4987 captures,
4988 next_capture: None,
4989 stack: Default::default(),
4990 highlight_maps,
4991 };
4992 } else {
4993 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4994 // Seeking such BufferChunks is not supported.
4995 debug_assert!(
4996 false,
4997 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4998 );
4999 }
5000
5001 highlights.captures.set_byte_range(self.range.clone());
5002 self.initialize_diagnostic_endpoints();
5003 }
5004 }
5005
5006 fn initialize_diagnostic_endpoints(&mut self) {
5007 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5008 && let Some(buffer) = self.buffer_snapshot
5009 {
5010 let mut diagnostic_endpoints = Vec::new();
5011 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5012 diagnostic_endpoints.push(DiagnosticEndpoint {
5013 offset: entry.range.start,
5014 is_start: true,
5015 severity: entry.diagnostic.severity,
5016 is_unnecessary: entry.diagnostic.is_unnecessary,
5017 underline: entry.diagnostic.underline,
5018 });
5019 diagnostic_endpoints.push(DiagnosticEndpoint {
5020 offset: entry.range.end,
5021 is_start: false,
5022 severity: entry.diagnostic.severity,
5023 is_unnecessary: entry.diagnostic.is_unnecessary,
5024 underline: entry.diagnostic.underline,
5025 });
5026 }
5027 diagnostic_endpoints
5028 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5029 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5030 self.hint_depth = 0;
5031 self.error_depth = 0;
5032 self.warning_depth = 0;
5033 self.information_depth = 0;
5034 }
5035 }
5036
5037 /// The current byte offset in the buffer.
5038 pub fn offset(&self) -> usize {
5039 self.range.start
5040 }
5041
5042 pub fn range(&self) -> Range<usize> {
5043 self.range.clone()
5044 }
5045
5046 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5047 let depth = match endpoint.severity {
5048 DiagnosticSeverity::ERROR => &mut self.error_depth,
5049 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5050 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5051 DiagnosticSeverity::HINT => &mut self.hint_depth,
5052 _ => return,
5053 };
5054 if endpoint.is_start {
5055 *depth += 1;
5056 } else {
5057 *depth -= 1;
5058 }
5059
5060 if endpoint.is_unnecessary {
5061 if endpoint.is_start {
5062 self.unnecessary_depth += 1;
5063 } else {
5064 self.unnecessary_depth -= 1;
5065 }
5066 }
5067 }
5068
5069 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5070 if self.error_depth > 0 {
5071 Some(DiagnosticSeverity::ERROR)
5072 } else if self.warning_depth > 0 {
5073 Some(DiagnosticSeverity::WARNING)
5074 } else if self.information_depth > 0 {
5075 Some(DiagnosticSeverity::INFORMATION)
5076 } else if self.hint_depth > 0 {
5077 Some(DiagnosticSeverity::HINT)
5078 } else {
5079 None
5080 }
5081 }
5082
5083 fn current_code_is_unnecessary(&self) -> bool {
5084 self.unnecessary_depth > 0
5085 }
5086}
5087
5088impl<'a> Iterator for BufferChunks<'a> {
5089 type Item = Chunk<'a>;
5090
5091 fn next(&mut self) -> Option<Self::Item> {
5092 let mut next_capture_start = usize::MAX;
5093 let mut next_diagnostic_endpoint = usize::MAX;
5094
5095 if let Some(highlights) = self.highlights.as_mut() {
5096 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5097 if *parent_capture_end <= self.range.start {
5098 highlights.stack.pop();
5099 } else {
5100 break;
5101 }
5102 }
5103
5104 if highlights.next_capture.is_none() {
5105 highlights.next_capture = highlights.captures.next();
5106 }
5107
5108 while let Some(capture) = highlights.next_capture.as_ref() {
5109 if self.range.start < capture.node.start_byte() {
5110 next_capture_start = capture.node.start_byte();
5111 break;
5112 } else {
5113 let highlight_id =
5114 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5115 highlights
5116 .stack
5117 .push((capture.node.end_byte(), highlight_id));
5118 highlights.next_capture = highlights.captures.next();
5119 }
5120 }
5121 }
5122
5123 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5124 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5125 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5126 if endpoint.offset <= self.range.start {
5127 self.update_diagnostic_depths(endpoint);
5128 diagnostic_endpoints.next();
5129 self.underline = endpoint.underline;
5130 } else {
5131 next_diagnostic_endpoint = endpoint.offset;
5132 break;
5133 }
5134 }
5135 }
5136 self.diagnostic_endpoints = diagnostic_endpoints;
5137
5138 if let Some(ChunkBitmaps {
5139 text: chunk,
5140 chars: chars_map,
5141 tabs,
5142 }) = self.chunks.peek_with_bitmaps()
5143 {
5144 let chunk_start = self.range.start;
5145 let mut chunk_end = (self.chunks.offset() + chunk.len())
5146 .min(next_capture_start)
5147 .min(next_diagnostic_endpoint);
5148 let mut highlight_id = None;
5149 if let Some(highlights) = self.highlights.as_ref()
5150 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5151 {
5152 chunk_end = chunk_end.min(*parent_capture_end);
5153 highlight_id = Some(*parent_highlight_id);
5154 }
5155 let bit_start = chunk_start - self.chunks.offset();
5156 let bit_end = chunk_end - self.chunks.offset();
5157
5158 let slice = &chunk[bit_start..bit_end];
5159
5160 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5161 let tabs = (tabs >> bit_start) & mask;
5162 let chars = (chars_map >> bit_start) & mask;
5163
5164 self.range.start = chunk_end;
5165 if self.range.start == self.chunks.offset() + chunk.len() {
5166 self.chunks.next().unwrap();
5167 }
5168
5169 Some(Chunk {
5170 text: slice,
5171 syntax_highlight_id: highlight_id,
5172 underline: self.underline,
5173 diagnostic_severity: self.current_diagnostic_severity(),
5174 is_unnecessary: self.current_code_is_unnecessary(),
5175 tabs,
5176 chars,
5177 ..Chunk::default()
5178 })
5179 } else {
5180 None
5181 }
5182 }
5183}
5184
5185impl operation_queue::Operation for Operation {
5186 fn lamport_timestamp(&self) -> clock::Lamport {
5187 match self {
5188 Operation::Buffer(_) => {
5189 unreachable!("buffer operations should never be deferred at this layer")
5190 }
5191 Operation::UpdateDiagnostics {
5192 lamport_timestamp, ..
5193 }
5194 | Operation::UpdateSelections {
5195 lamport_timestamp, ..
5196 }
5197 | Operation::UpdateCompletionTriggers {
5198 lamport_timestamp, ..
5199 }
5200 | Operation::UpdateLineEnding {
5201 lamport_timestamp, ..
5202 } => *lamport_timestamp,
5203 }
5204 }
5205}
5206
5207impl Default for Diagnostic {
5208 fn default() -> Self {
5209 Self {
5210 source: Default::default(),
5211 source_kind: DiagnosticSourceKind::Other,
5212 code: None,
5213 code_description: None,
5214 severity: DiagnosticSeverity::ERROR,
5215 message: Default::default(),
5216 markdown: None,
5217 group_id: 0,
5218 is_primary: false,
5219 is_disk_based: false,
5220 is_unnecessary: false,
5221 underline: true,
5222 data: None,
5223 }
5224 }
5225}
5226
5227impl IndentSize {
5228 /// Returns an [`IndentSize`] representing the given spaces.
5229 pub fn spaces(len: u32) -> Self {
5230 Self {
5231 len,
5232 kind: IndentKind::Space,
5233 }
5234 }
5235
5236 /// Returns an [`IndentSize`] representing a tab.
5237 pub fn tab() -> Self {
5238 Self {
5239 len: 1,
5240 kind: IndentKind::Tab,
5241 }
5242 }
5243
5244 /// An iterator over the characters represented by this [`IndentSize`].
5245 pub fn chars(&self) -> impl Iterator<Item = char> {
5246 iter::repeat(self.char()).take(self.len as usize)
5247 }
5248
5249 /// The character representation of this [`IndentSize`].
5250 pub fn char(&self) -> char {
5251 match self.kind {
5252 IndentKind::Space => ' ',
5253 IndentKind::Tab => '\t',
5254 }
5255 }
5256
5257 /// Consumes the current [`IndentSize`] and returns a new one that has
5258 /// been shrunk or enlarged by the given size along the given direction.
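    ///
    /// For example, growing a four-space indent by two more spaces (a sketch):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2));
    /// assert_eq!(indent.len, 6);
    /// ```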
5259 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5260 match direction {
5261 Ordering::Less => {
5262 if self.kind == size.kind && self.len >= size.len {
5263 self.len -= size.len;
5264 }
5265 }
5266 Ordering::Equal => {}
5267 Ordering::Greater => {
5268 if self.len == 0 {
5269 self = size;
5270 } else if self.kind == size.kind {
5271 self.len += size.len;
5272 }
5273 }
5274 }
5275 self
5276 }
5277
5278 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5279 match self.kind {
5280 IndentKind::Space => self.len as usize,
5281 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5282 }
5283 }
5284}
5285
5286#[cfg(any(test, feature = "test-support"))]
5287pub struct TestFile {
5288 pub path: Arc<RelPath>,
5289 pub root_name: String,
5290 pub local_root: Option<PathBuf>,
5291}
5292
5293#[cfg(any(test, feature = "test-support"))]
5294impl File for TestFile {
5295 fn path(&self) -> &Arc<RelPath> {
5296 &self.path
5297 }
5298
5299 fn full_path(&self, _: &gpui::App) -> PathBuf {
5300 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5301 }
5302
5303 fn as_local(&self) -> Option<&dyn LocalFile> {
5304 if self.local_root.is_some() {
5305 Some(self)
5306 } else {
5307 None
5308 }
5309 }
5310
5311 fn disk_state(&self) -> DiskState {
5312 unimplemented!()
5313 }
5314
5315 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5316 self.path().file_name().unwrap_or(self.root_name.as_ref())
5317 }
5318
5319 fn worktree_id(&self, _: &App) -> WorktreeId {
5320 WorktreeId::from_usize(0)
5321 }
5322
5323 fn to_proto(&self, _: &App) -> rpc::proto::File {
5324 unimplemented!()
5325 }
5326
5327 fn is_private(&self) -> bool {
5328 false
5329 }
5330
5331 fn path_style(&self, _cx: &App) -> PathStyle {
5332 PathStyle::local()
5333 }
5334}
5335
5336#[cfg(any(test, feature = "test-support"))]
5337impl LocalFile for TestFile {
5338 fn abs_path(&self, _cx: &App) -> PathBuf {
5339 PathBuf::from(self.local_root.as_ref().unwrap())
5340 .join(&self.root_name)
5341 .join(self.path.as_std_path())
5342 }
5343
5344 fn load(&self, _cx: &App) -> Task<Result<String>> {
5345 unimplemented!()
5346 }
5347
5348 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5349 unimplemented!()
5350 }
5351}
5352
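/// Groups an ascending sequence of row numbers into contiguous ranges, splitting any range
/// that would otherwise exceed `max_len`.
///
/// For example, `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)` yields `1..4`
/// followed by `5..7`.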
5353pub(crate) fn contiguous_ranges(
5354 values: impl Iterator<Item = u32>,
5355 max_len: usize,
5356) -> impl Iterator<Item = Range<u32>> {
5357 let mut values = values;
5358 let mut current_range: Option<Range<u32>> = None;
5359 std::iter::from_fn(move || {
5360 loop {
5361 if let Some(value) = values.next() {
5362 if let Some(range) = &mut current_range
5363 && value == range.end
5364 && range.len() < max_len
5365 {
5366 range.end += 1;
5367 continue;
5368 }
5369
5370 let prev_range = current_range.clone();
5371 current_range = Some(value..(value + 1));
5372 if prev_range.is_some() {
5373 return prev_range;
5374 }
5375 } else {
5376 return current_range.take();
5377 }
5378 }
5379 })
5380}
5381
5382#[derive(Default, Debug)]
5383pub struct CharClassifier {
5384 scope: Option<LanguageScope>,
5385 scope_context: Option<CharScopeContext>,
5386 ignore_punctuation: bool,
5387}
5388
5389impl CharClassifier {
5390 pub fn new(scope: Option<LanguageScope>) -> Self {
5391 Self {
5392 scope,
5393 scope_context: None,
5394 ignore_punctuation: false,
5395 }
5396 }
5397
5398 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5399 Self {
5400 scope_context,
5401 ..self
5402 }
5403 }
5404
5405 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5406 Self {
5407 ignore_punctuation,
5408 ..self
5409 }
5410 }
5411
5412 pub fn is_whitespace(&self, c: char) -> bool {
5413 self.kind(c) == CharKind::Whitespace
5414 }
5415
5416 pub fn is_word(&self, c: char) -> bool {
5417 self.kind(c) == CharKind::Word
5418 }
5419
5420 pub fn is_punctuation(&self, c: char) -> bool {
5421 self.kind(c) == CharKind::Punctuation
5422 }
5423
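    /// Classifies `c`, optionally treating punctuation as part of a word.
    ///
    /// A sketch of the default behavior, with no language scope attached:
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('_', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```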
5424 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5425 if c.is_alphanumeric() || c == '_' {
5426 return CharKind::Word;
5427 }
5428
5429 if let Some(scope) = &self.scope {
5430 let characters = match self.scope_context {
5431 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5432 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5433 None => scope.word_characters(),
5434 };
5435 if let Some(characters) = characters
5436 && characters.contains(&c)
5437 {
5438 return CharKind::Word;
5439 }
5440 }
5441
5442 if c.is_whitespace() {
5443 return CharKind::Whitespace;
5444 }
5445
5446 if ignore_punctuation {
5447 CharKind::Word
5448 } else {
5449 CharKind::Punctuation
5450 }
5451 }
5452
5453 pub fn kind(&self, c: char) -> CharKind {
5454 self.kind_with(c, self.ignore_punctuation)
5455 }
5456}
5457
5458/// Find all of the ranges of whitespace that occur at the ends of lines
5459/// in the given rope.
5460///
5461/// This could also be done with a regex search, but this implementation
5462/// avoids copying text.
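///
/// A sketch, assuming `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("a \nb\t\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..2, 4..6]);
/// ```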
5463pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5464 let mut ranges = Vec::new();
5465
5466 let mut offset = 0;
5467 let mut prev_chunk_trailing_whitespace_range = 0..0;
5468 for chunk in rope.chunks() {
5469 let mut prev_line_trailing_whitespace_range = 0..0;
5470 for (i, line) in chunk.split('\n').enumerate() {
5471 let line_end_offset = offset + line.len();
5472 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5473 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5474
5475 if i == 0 && trimmed_line_len == 0 {
5476 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5477 }
5478 if !prev_line_trailing_whitespace_range.is_empty() {
5479 ranges.push(prev_line_trailing_whitespace_range);
5480 }
5481
5482 offset = line_end_offset + 1;
5483 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5484 }
5485
5486 offset -= 1;
5487 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5488 }
5489
5490 if !prev_chunk_trailing_whitespace_range.is_empty() {
5491 ranges.push(prev_chunk_trailing_whitespace_range);
5492 }
5493
5494 ranges
5495}