1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16};
17pub use crate::{
18 Grammar, Language, LanguageRegistry,
19 diagnostic_set::DiagnosticSet,
20 highlight_map::{HighlightId, HighlightMap},
21 proto,
22};
23use anyhow::{Context as _, Result};
24pub use clock::ReplicaId;
25use clock::{Global, Lamport};
26use collections::{HashMap, HashSet};
27use fs::MTime;
28use futures::channel::oneshot;
29use gpui::{
30 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
31 Task, TaskLabel, TextStyle,
32};
33
34use itertools::Itertools;
35use lsp::{LanguageServerId, NumberOrString};
36use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
37use serde::{Deserialize, Serialize};
38use serde_json::Value;
39use settings::WorktreeId;
40use smallvec::SmallVec;
41use smol::future::yield_now;
42use std::{
43 any::Any,
44 borrow::Cow,
45 cell::Cell,
46 cmp::{self, Ordering, Reverse},
47 collections::{BTreeMap, BTreeSet},
48 future::Future,
49 iter::{self, Iterator, Peekable},
50 mem,
51 num::NonZeroU32,
52 ops::{Deref, Not, Range},
53 path::PathBuf,
54 rc,
55 sync::{Arc, LazyLock},
56 time::{Duration, Instant},
57 vec,
58};
59use sum_tree::TreeMap;
60use text::operation_queue::OperationQueue;
61use text::*;
62pub use text::{
63 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
64 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
65 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
66 ToPointUtf16, Transaction, TransactionId, Unclipped,
67};
68use theme::{ActiveTheme as _, SyntaxTheme};
69#[cfg(any(test, feature = "test-support"))]
70use util::RandomCharIter;
71use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
72
73#[cfg(any(test, feature = "test-support"))]
74pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
75
76pub use lsp::DiagnosticSeverity;
77
78/// A label for the background task spawned by the buffer to compute
79/// a diff against the contents of its file.
80pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
81
/// Indicates whether a [`Buffer`] may be edited.
83#[derive(PartialEq, Clone, Copy, Debug)]
84pub enum Capability {
85 /// The buffer is a mutable replica.
86 ReadWrite,
87 /// The buffer is a read-only replica.
88 ReadOnly,
89}
90
91pub type BufferRow = u32;
92
93/// An in-memory representation of a source code file, including its text,
94/// syntax trees, git status, and diagnostics.
95pub struct Buffer {
96 text: TextBuffer,
97 branch_state: Option<BufferBranchState>,
98 /// Filesystem state, `None` when there is no path.
99 file: Option<Arc<dyn File>>,
100 /// The mtime of the file when this buffer was last loaded from
101 /// or saved to disk.
102 saved_mtime: Option<MTime>,
103 /// The version vector when this buffer was last loaded from
104 /// or saved to disk.
105 saved_version: clock::Global,
106 preview_version: clock::Global,
107 transaction_depth: usize,
108 was_dirty_before_starting_transaction: Option<bool>,
109 reload_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 autoindent_requests: Vec<Arc<AutoindentRequest>>,
112 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
113 pending_autoindent: Option<Task<()>>,
114 sync_parse_timeout: Duration,
115 syntax_map: Mutex<SyntaxMap>,
116 reparse: Option<Task<()>>,
117 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
118 non_text_state_update_count: usize,
119 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
120 remote_selections: TreeMap<ReplicaId, SelectionSet>,
121 diagnostics_timestamp: clock::Lamport,
122 completion_triggers: BTreeSet<String>,
123 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
124 completion_triggers_timestamp: clock::Lamport,
125 deferred_ops: OperationQueue<Operation>,
126 capability: Capability,
127 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the last call.
130 has_unsaved_edits: Cell<(clock::Global, bool)>,
131 change_bits: Vec<rc::Weak<Cell<bool>>>,
132 _subscriptions: Vec<gpui::Subscription>,
133 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
134}
135
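/// A cache of data derived from the syntax tree, split into row chunks so that
/// expensive per-range computations (currently bracket matches) can be stored
/// per chunk and discarded wholesale when the buffer is reparsed.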
136#[derive(Debug, Clone)]
137pub struct TreeSitterData {
138 chunks: RowChunks,
139 brackets_by_chunks: Vec<Option<Vec<BracketMatch>>>,
140}
141
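/// The maximum number of buffer rows covered by a single chunk in [`TreeSitterData`].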
142const MAX_ROWS_IN_A_CHUNK: u32 = 50;
143
144impl TreeSitterData {
145 fn clear(&mut self) {
146 self.brackets_by_chunks = vec![None; self.chunks.len()];
147 }
148
149 fn new(snapshot: text::BufferSnapshot) -> Self {
150 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
151 Self {
152 brackets_by_chunks: vec![None; chunks.len()],
153 chunks,
154 }
155 }
156}
157
158#[derive(Copy, Clone, Debug, PartialEq, Eq)]
159pub enum ParseStatus {
160 Idle,
161 Parsing,
162}
163
164struct BufferBranchState {
165 base_buffer: Entity<Buffer>,
166 merged_operations: Vec<Lamport>,
167}
168
169/// An immutable, cheaply cloneable representation of a fixed
170/// state of a buffer.
171pub struct BufferSnapshot {
172 pub text: text::BufferSnapshot,
173 pub syntax: SyntaxSnapshot,
174 file: Option<Arc<dyn File>>,
175 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
176 remote_selections: TreeMap<ReplicaId, SelectionSet>,
177 language: Option<Arc<Language>>,
178 non_text_state_update_count: usize,
179 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
180}
181
182/// The kind and amount of indentation in a particular line. For now,
183/// assumes that indentation is all the same character.
184#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
185pub struct IndentSize {
186 /// The number of bytes that comprise the indentation.
187 pub len: u32,
188 /// The kind of whitespace used for indentation.
189 pub kind: IndentKind,
190}
191
192/// A whitespace character that's used for indentation.
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
194pub enum IndentKind {
195 /// An ASCII space character.
196 #[default]
197 Space,
198 /// An ASCII tab character.
199 Tab,
200}
201
202/// The shape of a selection cursor.
203#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
204pub enum CursorShape {
205 /// A vertical bar
206 #[default]
207 Bar,
208 /// A block that surrounds the following character
209 Block,
210 /// An underline that runs along the following character
211 Underline,
212 /// A box drawn around the following character
213 Hollow,
214}
215
216impl From<settings::CursorShape> for CursorShape {
217 fn from(shape: settings::CursorShape) -> Self {
218 match shape {
219 settings::CursorShape::Bar => CursorShape::Bar,
220 settings::CursorShape::Block => CursorShape::Block,
221 settings::CursorShape::Underline => CursorShape::Underline,
222 settings::CursorShape::Hollow => CursorShape::Hollow,
223 }
224 }
225}
226
227#[derive(Clone, Debug)]
228struct SelectionSet {
229 line_mode: bool,
230 cursor_shape: CursorShape,
231 selections: Arc<[Selection<Anchor>]>,
232 lamport_timestamp: clock::Lamport,
233}
234
235/// A diagnostic associated with a certain range of a buffer.
236#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
237pub struct Diagnostic {
238 /// The name of the service that produced this diagnostic.
239 pub source: Option<String>,
240 /// A machine-readable code that identifies this diagnostic.
241 pub code: Option<NumberOrString>,
242 pub code_description: Option<lsp::Uri>,
243 /// Whether this diagnostic is a hint, warning, or error.
244 pub severity: DiagnosticSeverity,
245 /// The human-readable message associated with this diagnostic.
246 pub message: String,
    /// The human-readable message, in Markdown format, if available.
248 pub markdown: Option<String>,
249 /// An id that identifies the group to which this diagnostic belongs.
250 ///
251 /// When a language server produces a diagnostic with
252 /// one or more associated diagnostics, those diagnostics are all
253 /// assigned a single group ID.
254 pub group_id: usize,
255 /// Whether this diagnostic is the primary diagnostic for its group.
256 ///
257 /// In a given group, the primary diagnostic is the top-level diagnostic
258 /// returned by the language server. The non-primary diagnostics are the
259 /// associated diagnostics.
260 pub is_primary: bool,
261 /// Whether this diagnostic is considered to originate from an analysis of
262 /// files on disk, as opposed to any unsaved buffer contents. This is a
263 /// property of a given diagnostic source, and is configured for a given
264 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
265 /// for the language server.
266 pub is_disk_based: bool,
267 /// Whether this diagnostic marks unnecessary code.
268 pub is_unnecessary: bool,
    /// A coarse classification of the diagnostic based on the kind of source that produced it.
270 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when requesting code actions for it.
272 pub data: Option<Value>,
273 /// Whether to underline the corresponding text range in the editor.
274 pub underline: bool,
275}
276
277#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
278pub enum DiagnosticSourceKind {
279 Pulled,
280 Pushed,
281 Other,
282}
283
284/// An operation used to synchronize this buffer with its other replicas.
285#[derive(Clone, Debug, PartialEq)]
286pub enum Operation {
287 /// A text operation.
288 Buffer(text::Operation),
289
290 /// An update to the buffer's diagnostics.
291 UpdateDiagnostics {
292 /// The id of the language server that produced the new diagnostics.
293 server_id: LanguageServerId,
294 /// The diagnostics.
295 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
296 /// The buffer's lamport timestamp.
297 lamport_timestamp: clock::Lamport,
298 },
299
300 /// An update to the most recent selections in this buffer.
301 UpdateSelections {
302 /// The selections.
303 selections: Arc<[Selection<Anchor>]>,
304 /// The buffer's lamport timestamp.
305 lamport_timestamp: clock::Lamport,
306 /// Whether the selections are in 'line mode'.
307 line_mode: bool,
308 /// The [`CursorShape`] associated with these selections.
309 cursor_shape: CursorShape,
310 },
311
312 /// An update to the characters that should trigger autocompletion
313 /// for this buffer.
314 UpdateCompletionTriggers {
315 /// The characters that trigger autocompletion.
316 triggers: Vec<String>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 /// The language server ID.
320 server_id: LanguageServerId,
321 },
322
323 /// An update to the line ending type of this buffer.
324 UpdateLineEnding {
325 /// The line ending type.
326 line_ending: LineEnding,
327 /// The buffer's lamport timestamp.
328 lamport_timestamp: clock::Lamport,
329 },
330}
331
332/// An event that occurs in a buffer.
333#[derive(Clone, Debug, PartialEq)]
334pub enum BufferEvent {
335 /// The buffer was changed in a way that must be
336 /// propagated to its other replicas.
337 Operation {
338 operation: Operation,
339 is_local: bool,
340 },
341 /// The buffer was edited.
342 Edited,
343 /// The buffer's `dirty` bit changed.
344 DirtyChanged,
345 /// The buffer was saved.
346 Saved,
347 /// The buffer's file was changed on disk.
348 FileHandleChanged,
349 /// The buffer was reloaded.
350 Reloaded,
    /// The buffer needs to be reloaded.
352 ReloadNeeded,
353 /// The buffer's language was changed.
354 LanguageChanged,
355 /// The buffer's syntax trees were updated.
356 Reparsed,
357 /// The buffer's diagnostics were updated.
358 DiagnosticsUpdated,
359 /// The buffer gained or lost editing capabilities.
360 CapabilityChanged,
361}
362
363/// The file associated with a buffer.
364pub trait File: Send + Sync + Any {
365 /// Returns the [`LocalFile`] associated with this file, if the
366 /// file is local.
367 fn as_local(&self) -> Option<&dyn LocalFile>;
368
369 /// Returns whether this file is local.
370 fn is_local(&self) -> bool {
371 self.as_local().is_some()
372 }
373
374 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
375 /// only available in some states, such as modification time.
376 fn disk_state(&self) -> DiskState;
377
378 /// Returns the path of this file relative to the worktree's root directory.
379 fn path(&self) -> &Arc<RelPath>;
380
381 /// Returns the path of this file relative to the worktree's parent directory (this means it
382 /// includes the name of the worktree's root folder).
383 fn full_path(&self, cx: &App) -> PathBuf;
384
385 /// Returns the path style of this file.
386 fn path_style(&self, cx: &App) -> PathStyle;
387
388 /// Returns the last component of this handle's absolute path. If this handle refers to the root
389 /// of its worktree, then this method will return the name of the worktree itself.
390 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
391
392 /// Returns the id of the worktree to which this file belongs.
393 ///
394 /// This is needed for looking up project-specific settings.
395 fn worktree_id(&self, cx: &App) -> WorktreeId;
396
397 /// Converts this file into a protobuf message.
398 fn to_proto(&self, cx: &App) -> rpc::proto::File;
399
400 /// Return whether Zed considers this to be a private file.
401 fn is_private(&self) -> bool;
402}
403
404/// The file's storage status - whether it's stored (`Present`), and if so when it was last
405/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
406/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
407/// indicator for new files.
408#[derive(Copy, Clone, Debug, PartialEq)]
409pub enum DiskState {
410 /// File created in Zed that has not been saved.
411 New,
412 /// File present on the filesystem.
413 Present { mtime: MTime },
414 /// Deleted file that was previously present.
415 Deleted,
416}
417
418impl DiskState {
419 /// Returns the file's last known modification time on disk.
420 pub fn mtime(self) -> Option<MTime> {
421 match self {
422 DiskState::New => None,
423 DiskState::Present { mtime } => Some(mtime),
424 DiskState::Deleted => None,
425 }
426 }
427
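    /// Returns whether the file currently exists in storage.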
428 pub fn exists(&self) -> bool {
429 match self {
430 DiskState::New => false,
431 DiskState::Present { .. } => true,
432 DiskState::Deleted => false,
433 }
434 }
435}
436
437/// The file associated with a buffer, in the case where the file is on the local disk.
438pub trait LocalFile: File {
    /// Returns the absolute path of this file.
440 fn abs_path(&self, cx: &App) -> PathBuf;
441
442 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
443 fn load(&self, cx: &App) -> Task<Result<String>>;
444
445 /// Loads the file's contents from disk.
446 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
447}
448
449/// The auto-indent behavior associated with an editing operation.
450/// For some editing operations, each affected line of text has its
451/// indentation recomputed. For other operations, the entire block
452/// of edited text is adjusted uniformly.
453#[derive(Clone, Debug)]
454pub enum AutoindentMode {
455 /// Indent each line of inserted text.
456 EachLine,
457 /// Apply the same indentation adjustment to all of the lines
458 /// in a given insertion.
459 Block {
460 /// The original indentation column of the first line of each
461 /// insertion, if it has been copied.
462 ///
463 /// Knowing this makes it possible to preserve the relative indentation
464 /// of every line in the insertion from when it was copied.
465 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
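        ///
        /// For example, if text copied at indent column 4 (`a = 4`) has its first
        /// line auto-indented to column 8 (`b = 8`), every other line of the
        /// insertion is shifted right by 4 columns.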
469 original_indent_columns: Vec<Option<u32>>,
470 },
471}
472
473#[derive(Clone)]
474struct AutoindentRequest {
475 before_edit: BufferSnapshot,
476 entries: Vec<AutoindentRequestEntry>,
477 is_block_mode: bool,
478 ignore_empty_lines: bool,
479}
480
481#[derive(Debug, Clone)]
482struct AutoindentRequestEntry {
483 /// A range of the buffer whose indentation should be adjusted.
484 range: Range<Anchor>,
485 /// Whether or not these lines should be considered brand new, for the
486 /// purpose of auto-indent. When text is not new, its indentation will
487 /// only be adjusted if the suggested indentation level has *changed*
488 /// since the edit was made.
489 first_line_is_new: bool,
490 indent_size: IndentSize,
491 original_indent_column: Option<u32>,
492}
493
494#[derive(Debug)]
495struct IndentSuggestion {
496 basis_row: u32,
497 delta: Ordering,
498 within_error: bool,
499}
500
501struct BufferChunkHighlights<'a> {
502 captures: SyntaxMapCaptures<'a>,
503 next_capture: Option<SyntaxMapCapture<'a>>,
504 stack: Vec<(usize, HighlightId)>,
505 highlight_maps: Vec<HighlightMap>,
506}
507
508/// An iterator that yields chunks of a buffer's text, along with their
509/// syntax highlights and diagnostic status.
510pub struct BufferChunks<'a> {
511 buffer_snapshot: Option<&'a BufferSnapshot>,
512 range: Range<usize>,
513 chunks: text::Chunks<'a>,
514 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
515 error_depth: usize,
516 warning_depth: usize,
517 information_depth: usize,
518 hint_depth: usize,
519 unnecessary_depth: usize,
520 underline: bool,
521 highlights: Option<BufferChunkHighlights<'a>>,
522}
523
524/// A chunk of a buffer's text, along with its syntax highlight and
525/// diagnostic status.
526#[derive(Clone, Debug, Default)]
527pub struct Chunk<'a> {
528 /// The text of the chunk.
529 pub text: &'a str,
530 /// The syntax highlighting style of the chunk.
531 pub syntax_highlight_id: Option<HighlightId>,
532 /// The highlight style that has been applied to this chunk in
533 /// the editor.
534 pub highlight_style: Option<HighlightStyle>,
535 /// The severity of diagnostic associated with this chunk, if any.
536 pub diagnostic_severity: Option<DiagnosticSeverity>,
537 /// A bitset of which characters are tabs in this string.
538 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
540 pub chars: u128,
541 /// Whether this chunk of text is marked as unnecessary.
542 pub is_unnecessary: bool,
543 /// Whether this chunk of text was originally a tab character.
544 pub is_tab: bool,
545 /// Whether this chunk of text was originally an inlay.
546 pub is_inlay: bool,
547 /// Whether to underline the corresponding text range in the editor.
548 pub underline: bool,
549}
550
551/// A set of edits to a given version of a buffer, computed asynchronously.
552#[derive(Debug)]
553pub struct Diff {
554 pub base_version: clock::Global,
555 pub line_ending: LineEnding,
556 pub edits: Vec<(Range<usize>, Arc<str>)>,
557}
558
559#[derive(Debug, Clone, Copy)]
560pub(crate) struct DiagnosticEndpoint {
561 offset: usize,
562 is_start: bool,
563 underline: bool,
564 severity: DiagnosticSeverity,
565 is_unnecessary: bool,
566}
567
568/// A class of characters, used for characterizing a run of text.
569#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
570pub enum CharKind {
571 /// Whitespace.
572 Whitespace,
573 /// Punctuation.
574 Punctuation,
575 /// Word.
576 Word,
577}
578
579/// Context for character classification within a specific scope.
580#[derive(Copy, Clone, Eq, PartialEq, Debug)]
581pub enum CharScopeContext {
582 /// Character classification for completion queries.
583 ///
584 /// This context treats certain characters as word constituents that would
585 /// normally be considered punctuation, such as '-' in Tailwind classes
586 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
587 Completion,
588 /// Character classification for linked edits.
589 ///
590 /// This context handles characters that should be treated as part of
591 /// identifiers during linked editing operations, such as '.' in JSX
592 /// component names like `<Animated.View>`.
593 LinkedEdit,
594}
595
/// A runnable is the data about a buffer region that can be resolved into a task.
597pub struct Runnable {
598 pub tags: SmallVec<[RunnableTag; 1]>,
599 pub language: Arc<Language>,
600 pub buffer: BufferId,
601}
602
603#[derive(Default, Clone, Debug)]
604pub struct HighlightedText {
605 pub text: SharedString,
606 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
607}
608
609#[derive(Default, Debug)]
610struct HighlightedTextBuilder {
611 pub text: String,
612 highlights: Vec<(Range<usize>, HighlightStyle)>,
613}
614
615impl HighlightedText {
616 pub fn from_buffer_range<T: ToOffset>(
617 range: Range<T>,
618 snapshot: &text::BufferSnapshot,
619 syntax_snapshot: &SyntaxSnapshot,
620 override_style: Option<HighlightStyle>,
621 syntax_theme: &SyntaxTheme,
622 ) -> Self {
623 let mut highlighted_text = HighlightedTextBuilder::default();
624 highlighted_text.add_text_from_buffer_range(
625 range,
626 snapshot,
627 syntax_snapshot,
628 override_style,
629 syntax_theme,
630 );
631 highlighted_text.build()
632 }
633
634 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
635 gpui::StyledText::new(self.text.clone())
636 .with_default_highlights(default_style, self.highlights.iter().cloned())
637 }
638
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
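    ///
    /// For example, for the highlighted text `"    foo\nbar"` with no highlight
    /// starting inside the leading whitespace, this returns `"foo"` and `true`.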
641 pub fn first_line_preview(self) -> (Self, bool) {
642 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
643 let first_line = &self.text[..newline_ix];
644
645 // Trim leading whitespace, unless an edit starts prior to it.
646 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
647 if let Some((first_highlight_range, _)) = self.highlights.first() {
648 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
649 }
650
651 let preview_text = &first_line[preview_start_ix..];
652 let preview_highlights = self
653 .highlights
654 .into_iter()
655 .skip_while(|(range, _)| range.end <= preview_start_ix)
656 .take_while(|(range, _)| range.start < newline_ix)
657 .filter_map(|(mut range, highlight)| {
658 range.start = range.start.saturating_sub(preview_start_ix);
659 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
660 if range.is_empty() {
661 None
662 } else {
663 Some((range, highlight))
664 }
665 });
666
667 let preview = Self {
668 text: SharedString::new(preview_text),
669 highlights: preview_highlights.collect(),
670 };
671
672 (preview, self.text.len() > newline_ix)
673 }
674}
675
676impl HighlightedTextBuilder {
677 pub fn build(self) -> HighlightedText {
678 HighlightedText {
679 text: self.text.into(),
680 highlights: self.highlights,
681 }
682 }
683
684 pub fn add_text_from_buffer_range<T: ToOffset>(
685 &mut self,
686 range: Range<T>,
687 snapshot: &text::BufferSnapshot,
688 syntax_snapshot: &SyntaxSnapshot,
689 override_style: Option<HighlightStyle>,
690 syntax_theme: &SyntaxTheme,
691 ) {
692 let range = range.to_offset(snapshot);
693 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
694 let start = self.text.len();
695 self.text.push_str(chunk.text);
696 let end = self.text.len();
697
698 if let Some(highlight_style) = chunk
699 .syntax_highlight_id
700 .and_then(|id| id.style(syntax_theme))
701 {
702 let highlight_style = override_style.map_or(highlight_style, |override_style| {
703 highlight_style.highlight(override_style)
704 });
705 self.highlights.push((start..end, highlight_style));
706 } else if let Some(override_style) = override_style {
707 self.highlights.push((start..end, override_style));
708 }
709 }
710 }
711
712 fn highlighted_chunks<'a>(
713 range: Range<usize>,
714 snapshot: &'a text::BufferSnapshot,
715 syntax_snapshot: &'a SyntaxSnapshot,
716 ) -> BufferChunks<'a> {
717 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
718 grammar
719 .highlights_config
720 .as_ref()
721 .map(|config| &config.query)
722 });
723
724 let highlight_maps = captures
725 .grammars()
726 .iter()
727 .map(|grammar| grammar.highlight_map())
728 .collect();
729
730 BufferChunks::new(
731 snapshot.as_rope(),
732 range,
733 Some((captures, highlight_maps)),
734 false,
735 None,
736 )
737 }
738}
739
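/// A preview of a set of proposed edits: a branch snapshot with the edits
/// applied, plus the syntax state needed to render them as highlighted text
/// without modifying the original buffer.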
740#[derive(Clone)]
741pub struct EditPreview {
742 old_snapshot: text::BufferSnapshot,
743 applied_edits_snapshot: text::BufferSnapshot,
744 syntax_snapshot: SyntaxSnapshot,
745}
746
747impl EditPreview {
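    /// Renders the given edits as highlighted text, marking inserted text with the
    /// theme's "created" background and, when `include_deletions` is true, deleted
    /// text with the "deleted" background.
    ///
    /// A minimal usage sketch (not compiled); `buffer`, `edits`, `text_style`, and
    /// `cx` are assumed to exist in the caller:
    ///
    /// ```ignore
    /// // `preview` is the `EditPreview` returned by `Buffer::preview_edits`.
    /// let snapshot = buffer.read(cx).snapshot();
    /// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
    /// let styled = highlighted.to_styled_text(&text_style);
    /// ```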
748 pub fn highlight_edits(
749 &self,
750 current_snapshot: &BufferSnapshot,
751 edits: &[(Range<Anchor>, impl AsRef<str>)],
752 include_deletions: bool,
753 cx: &App,
754 ) -> HighlightedText {
755 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
756 return HighlightedText::default();
757 };
758
759 let mut highlighted_text = HighlightedTextBuilder::default();
760
761 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
762
763 let insertion_highlight_style = HighlightStyle {
764 background_color: Some(cx.theme().status().created_background),
765 ..Default::default()
766 };
767 let deletion_highlight_style = HighlightStyle {
768 background_color: Some(cx.theme().status().deleted_background),
769 ..Default::default()
770 };
771 let syntax_theme = cx.theme().syntax();
772
773 for (range, edit_text) in edits {
774 let edit_new_end_in_preview_snapshot = range
775 .end
776 .bias_right(&self.old_snapshot)
777 .to_offset(&self.applied_edits_snapshot);
778 let edit_start_in_preview_snapshot =
779 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
780
781 let unchanged_range_in_preview_snapshot =
782 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
783 if !unchanged_range_in_preview_snapshot.is_empty() {
784 highlighted_text.add_text_from_buffer_range(
785 unchanged_range_in_preview_snapshot,
786 &self.applied_edits_snapshot,
787 &self.syntax_snapshot,
788 None,
789 syntax_theme,
790 );
791 }
792
793 let range_in_current_snapshot = range.to_offset(current_snapshot);
794 if include_deletions && !range_in_current_snapshot.is_empty() {
795 highlighted_text.add_text_from_buffer_range(
796 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
799 Some(deletion_highlight_style),
800 syntax_theme,
801 );
802 }
803
804 if !edit_text.as_ref().is_empty() {
805 highlighted_text.add_text_from_buffer_range(
806 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
807 &self.applied_edits_snapshot,
808 &self.syntax_snapshot,
809 Some(insertion_highlight_style),
810 syntax_theme,
811 );
812 }
813
814 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
815 }
816
817 highlighted_text.add_text_from_buffer_range(
818 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
819 &self.applied_edits_snapshot,
820 &self.syntax_snapshot,
821 None,
822 syntax_theme,
823 );
824
825 highlighted_text.build()
826 }
827
828 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
829 let (first, _) = edits.first()?;
830 let (last, _) = edits.last()?;
831
832 let start = first
833 .start
834 .bias_left(&self.old_snapshot)
835 .to_point(&self.applied_edits_snapshot);
836 let end = last
837 .end
838 .bias_right(&self.old_snapshot)
839 .to_point(&self.applied_edits_snapshot);
840
841 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
842 let range = Point::new(start.row, 0)
843 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
844
845 Some(range.to_offset(&self.applied_edits_snapshot))
846 }
847}
848
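/// A pair of matching brackets, described by the offset ranges of the opening
/// and closing delimiters.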
849#[derive(Clone, Debug, PartialEq, Eq)]
850pub struct BracketMatch {
851 pub open_range: Range<usize>,
852 pub close_range: Range<usize>,
853 pub newline_only: bool,
854 pub color_index: Option<usize>,
855}
856
857impl BracketMatch {
858 pub fn bracket_ranges(self) -> (Range<usize>, Range<usize>) {
859 (self.open_range, self.close_range)
860 }
861}
862
863impl Buffer {
864 /// Create a new buffer with the given base text.
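    ///
    /// A minimal sketch, assuming a gpui context that can construct entities:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```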
865 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
866 Self::build(
867 TextBuffer::new(
868 ReplicaId::LOCAL,
869 cx.entity_id().as_non_zero_u64().into(),
870 base_text.into(),
871 ),
872 None,
873 Capability::ReadWrite,
874 )
875 }
876
    /// Create a new buffer from base text that has already had line endings and other normalization applied.
878 pub fn local_normalized(
879 base_text_normalized: Rope,
880 line_ending: LineEnding,
881 cx: &Context<Self>,
882 ) -> Self {
883 Self::build(
884 TextBuffer::new_normalized(
885 ReplicaId::LOCAL,
886 cx.entity_id().as_non_zero_u64().into(),
887 line_ending,
888 base_text_normalized,
889 ),
890 None,
891 Capability::ReadWrite,
892 )
893 }
894
895 /// Create a new buffer that is a replica of a remote buffer.
896 pub fn remote(
897 remote_id: BufferId,
898 replica_id: ReplicaId,
899 capability: Capability,
900 base_text: impl Into<String>,
901 ) -> Self {
902 Self::build(
903 TextBuffer::new(replica_id, remote_id, base_text.into()),
904 None,
905 capability,
906 )
907 }
908
909 /// Create a new buffer that is a replica of a remote buffer, populating its
910 /// state from the given protobuf message.
911 pub fn from_proto(
912 replica_id: ReplicaId,
913 capability: Capability,
914 message: proto::BufferState,
915 file: Option<Arc<dyn File>>,
916 ) -> Result<Self> {
917 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
918 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
919 let mut this = Self::build(buffer, file, capability);
920 this.text.set_line_ending(proto::deserialize_line_ending(
921 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
922 ));
923 this.saved_version = proto::deserialize_version(&message.saved_version);
924 this.saved_mtime = message.saved_mtime.map(|time| time.into());
925 Ok(this)
926 }
927
928 /// Serialize the buffer's state to a protobuf message.
929 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
930 proto::BufferState {
931 id: self.remote_id().into(),
932 file: self.file.as_ref().map(|f| f.to_proto(cx)),
933 base_text: self.base_text().to_string(),
934 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
935 saved_version: proto::serialize_version(&self.saved_version),
936 saved_mtime: self.saved_mtime.map(|time| time.into()),
937 }
938 }
939
940 /// Serialize as protobufs all of the changes to the buffer since the given version.
941 pub fn serialize_ops(
942 &self,
943 since: Option<clock::Global>,
944 cx: &App,
945 ) -> Task<Vec<proto::Operation>> {
946 let mut operations = Vec::new();
947 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
948
949 operations.extend(self.remote_selections.iter().map(|(_, set)| {
950 proto::serialize_operation(&Operation::UpdateSelections {
951 selections: set.selections.clone(),
952 lamport_timestamp: set.lamport_timestamp,
953 line_mode: set.line_mode,
954 cursor_shape: set.cursor_shape,
955 })
956 }));
957
958 for (server_id, diagnostics) in &self.diagnostics {
959 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
960 lamport_timestamp: self.diagnostics_timestamp,
961 server_id: *server_id,
962 diagnostics: diagnostics.iter().cloned().collect(),
963 }));
964 }
965
966 for (server_id, completions) in &self.completion_triggers_per_language_server {
967 operations.push(proto::serialize_operation(
968 &Operation::UpdateCompletionTriggers {
969 triggers: completions.iter().cloned().collect(),
970 lamport_timestamp: self.completion_triggers_timestamp,
971 server_id: *server_id,
972 },
973 ));
974 }
975
976 let text_operations = self.text.operations().clone();
977 cx.background_spawn(async move {
978 let since = since.unwrap_or_default();
979 operations.extend(
980 text_operations
981 .iter()
982 .filter(|(_, op)| !since.observed(op.timestamp()))
983 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
984 );
985 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
986 operations
987 })
988 }
989
990 /// Assign a language to the buffer, returning the buffer.
991 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
992 self.set_language(Some(language), cx);
993 self
994 }
995
996 /// Returns the [`Capability`] of this buffer.
997 pub fn capability(&self) -> Capability {
998 self.capability
999 }
1000
1001 /// Whether this buffer can only be read.
1002 pub fn read_only(&self) -> bool {
1003 self.capability == Capability::ReadOnly
1004 }
1005
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1007 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1008 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1009 let snapshot = buffer.snapshot();
1010 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1011 let tree_sitter_data = TreeSitterData::new(snapshot);
1012 Self {
1013 saved_mtime,
1014 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1015 saved_version: buffer.version(),
1016 preview_version: buffer.version(),
1017 reload_task: None,
1018 transaction_depth: 0,
1019 was_dirty_before_starting_transaction: None,
1020 has_unsaved_edits: Cell::new((buffer.version(), false)),
1021 text: buffer,
1022 branch_state: None,
1023 file,
1024 capability,
1025 syntax_map,
1026 reparse: None,
1027 non_text_state_update_count: 0,
1028 sync_parse_timeout: Duration::from_millis(1),
1029 parse_status: watch::channel(ParseStatus::Idle),
1030 autoindent_requests: Default::default(),
1031 wait_for_autoindent_txs: Default::default(),
1032 pending_autoindent: Default::default(),
1033 language: None,
1034 remote_selections: Default::default(),
1035 diagnostics: Default::default(),
1036 diagnostics_timestamp: Lamport::MIN,
1037 completion_triggers: Default::default(),
1038 completion_triggers_per_language_server: Default::default(),
1039 completion_triggers_timestamp: Lamport::MIN,
1040 deferred_ops: OperationQueue::new(),
1041 has_conflict: false,
1042 change_bits: Default::default(),
1043 _subscriptions: Vec::new(),
1044 }
1045 }
1046
1047 pub fn build_snapshot(
1048 text: Rope,
1049 language: Option<Arc<Language>>,
1050 language_registry: Option<Arc<LanguageRegistry>>,
1051 cx: &mut App,
1052 ) -> impl Future<Output = BufferSnapshot> + use<> {
1053 let entity_id = cx.reserve_entity::<Self>().entity_id();
1054 let buffer_id = entity_id.as_non_zero_u64().into();
1055 async move {
1056 let text =
1057 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1058 .snapshot();
1059 let mut syntax = SyntaxMap::new(&text).snapshot();
1060 if let Some(language) = language.clone() {
1061 let language_registry = language_registry.clone();
1062 syntax.reparse(&text, language_registry, language);
1063 }
1064 let tree_sitter_data = TreeSitterData::new(text.clone());
1065 BufferSnapshot {
1066 text,
1067 syntax,
1068 file: None,
1069 diagnostics: Default::default(),
1070 remote_selections: Default::default(),
1071 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1072 language,
1073 non_text_state_update_count: 0,
1074 }
1075 }
1076 }
1077
1078 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1079 let entity_id = cx.reserve_entity::<Self>().entity_id();
1080 let buffer_id = entity_id.as_non_zero_u64().into();
1081 let text = TextBuffer::new_normalized(
1082 ReplicaId::LOCAL,
1083 buffer_id,
1084 Default::default(),
1085 Rope::new(),
1086 )
1087 .snapshot();
1088 let syntax = SyntaxMap::new(&text).snapshot();
1089 let tree_sitter_data = TreeSitterData::new(text.clone());
1090 BufferSnapshot {
1091 text,
1092 syntax,
1093 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1094 file: None,
1095 diagnostics: Default::default(),
1096 remote_selections: Default::default(),
1097 language: None,
1098 non_text_state_update_count: 0,
1099 }
1100 }
1101
1102 #[cfg(any(test, feature = "test-support"))]
1103 pub fn build_snapshot_sync(
1104 text: Rope,
1105 language: Option<Arc<Language>>,
1106 language_registry: Option<Arc<LanguageRegistry>>,
1107 cx: &mut App,
1108 ) -> BufferSnapshot {
1109 let entity_id = cx.reserve_entity::<Self>().entity_id();
1110 let buffer_id = entity_id.as_non_zero_u64().into();
1111 let text =
1112 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1113 .snapshot();
1114 let mut syntax = SyntaxMap::new(&text).snapshot();
1115 if let Some(language) = language.clone() {
1116 syntax.reparse(&text, language_registry, language);
1117 }
1118 let tree_sitter_data = TreeSitterData::new(text.clone());
1119 BufferSnapshot {
1120 text,
1121 syntax,
1122 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1123 file: None,
1124 diagnostics: Default::default(),
1125 remote_selections: Default::default(),
1126 language,
1127 non_text_state_update_count: 0,
1128 }
1129 }
1130
1131 /// Retrieve a snapshot of the buffer's current state. This is computationally
1132 /// cheap, and allows reading from the buffer on a background thread.
1133 pub fn snapshot(&self) -> BufferSnapshot {
1134 let text = self.text.snapshot();
1135 let mut syntax_map = self.syntax_map.lock();
1136 syntax_map.interpolate(&text);
1137 let syntax = syntax_map.snapshot();
1138
1139 BufferSnapshot {
1140 text,
1141 syntax,
1142 tree_sitter_data: self.tree_sitter_data.clone(),
1143 file: self.file.clone(),
1144 remote_selections: self.remote_selections.clone(),
1145 diagnostics: self.diagnostics.clone(),
1146 language: self.language.clone(),
1147 non_text_state_update_count: self.non_text_state_update_count,
1148 }
1149 }
1150
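    /// Creates a branch of this buffer: a new buffer entity that starts from the
    /// current contents and language, and whose edits can later be applied back
    /// to this buffer via [`Buffer::merge_into_base`].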
1151 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1152 let this = cx.entity();
1153 cx.new(|cx| {
1154 let mut branch = Self {
1155 branch_state: Some(BufferBranchState {
1156 base_buffer: this.clone(),
1157 merged_operations: Default::default(),
1158 }),
1159 language: self.language.clone(),
1160 has_conflict: self.has_conflict,
1161 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1162 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1163 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1164 };
1165 if let Some(language_registry) = self.language_registry() {
1166 branch.set_language_registry(language_registry);
1167 }
1168
1169 // Reparse the branch buffer so that we get syntax highlighting immediately.
1170 branch.reparse(cx);
1171
1172 branch
1173 })
1174 }
1175
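    /// Computes an [`EditPreview`] for the given edits on a background task,
    /// without modifying this buffer.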
1176 pub fn preview_edits(
1177 &self,
1178 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1179 cx: &App,
1180 ) -> Task<EditPreview> {
1181 let registry = self.language_registry();
1182 let language = self.language().cloned();
1183 let old_snapshot = self.text.snapshot();
1184 let mut branch_buffer = self.text.branch();
1185 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1186 cx.background_spawn(async move {
1187 if !edits.is_empty() {
1188 if let Some(language) = language.clone() {
1189 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1190 }
1191
1192 branch_buffer.edit(edits.iter().cloned());
1193 let snapshot = branch_buffer.snapshot();
1194 syntax_snapshot.interpolate(&snapshot);
1195
1196 if let Some(language) = language {
1197 syntax_snapshot.reparse(&snapshot, registry, language);
1198 }
1199 }
1200 EditPreview {
1201 old_snapshot,
1202 applied_edits_snapshot: branch_buffer.snapshot(),
1203 syntax_snapshot,
1204 }
1205 })
1206 }
1207
1208 /// Applies all of the changes in this buffer that intersect any of the
1209 /// given `ranges` to its base buffer.
1210 ///
1211 /// If `ranges` is empty, then all changes will be applied. This buffer must
1212 /// be a branch buffer to call this method.
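    ///
    /// A hedged sketch of the branch-and-merge flow (identifiers are illustrative):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty `ranges` vector applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```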
1213 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1214 let Some(base_buffer) = self.base_buffer() else {
1215 debug_panic!("not a branch buffer");
1216 return;
1217 };
1218
1219 let mut ranges = if ranges.is_empty() {
1220 &[0..usize::MAX]
1221 } else {
1222 ranges.as_slice()
1223 }
1224 .iter()
1225 .peekable();
1226
1227 let mut edits = Vec::new();
1228 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1229 let mut is_included = false;
1230 while let Some(range) = ranges.peek() {
1231 if range.end < edit.new.start {
1232 ranges.next().unwrap();
1233 } else {
1234 if range.start <= edit.new.end {
1235 is_included = true;
1236 }
1237 break;
1238 }
1239 }
1240
1241 if is_included {
1242 edits.push((
1243 edit.old.clone(),
1244 self.text_for_range(edit.new.clone()).collect::<String>(),
1245 ));
1246 }
1247 }
1248
1249 let operation = base_buffer.update(cx, |base_buffer, cx| {
1250 // cx.emit(BufferEvent::DiffBaseChanged);
1251 base_buffer.edit(edits, None, cx)
1252 });
1253
1254 if let Some(operation) = operation
1255 && let Some(BufferBranchState {
1256 merged_operations, ..
1257 }) = &mut self.branch_state
1258 {
1259 merged_operations.push(operation);
1260 }
1261 }
1262
1263 fn on_base_buffer_event(
1264 &mut self,
1265 _: Entity<Buffer>,
1266 event: &BufferEvent,
1267 cx: &mut Context<Self>,
1268 ) {
1269 let BufferEvent::Operation { operation, .. } = event else {
1270 return;
1271 };
1272 let Some(BufferBranchState {
1273 merged_operations, ..
1274 }) = &mut self.branch_state
1275 else {
1276 return;
1277 };
1278
1279 let mut operation_to_undo = None;
1280 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1281 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1282 {
1283 merged_operations.remove(ix);
1284 operation_to_undo = Some(operation.timestamp);
1285 }
1286
1287 self.apply_ops([operation.clone()], cx);
1288
1289 if let Some(timestamp) = operation_to_undo {
1290 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1291 self.undo_operations(counts, cx);
1292 }
1293 }
1294
1295 #[cfg(test)]
1296 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1297 &self.text
1298 }
1299
1300 /// Retrieve a snapshot of the buffer's raw text, without any
1301 /// language-related state like the syntax tree or diagnostics.
1302 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1303 self.text.snapshot()
1304 }
1305
1306 /// The file associated with the buffer, if any.
1307 pub fn file(&self) -> Option<&Arc<dyn File>> {
1308 self.file.as_ref()
1309 }
1310
1311 /// The version of the buffer that was last saved or reloaded from disk.
1312 pub fn saved_version(&self) -> &clock::Global {
1313 &self.saved_version
1314 }
1315
1316 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1317 pub fn saved_mtime(&self) -> Option<MTime> {
1318 self.saved_mtime
1319 }
1320
1321 /// Assign a language to the buffer.
1322 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1323 self.non_text_state_update_count += 1;
1324 self.syntax_map.lock().clear(&self.text);
1325 self.language = language;
1326 self.was_changed();
1327 self.reparse(cx);
1328 cx.emit(BufferEvent::LanguageChanged);
1329 }
1330
1331 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1332 /// other languages if parts of the buffer are written in different languages.
1333 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1334 self.syntax_map
1335 .lock()
1336 .set_language_registry(language_registry);
1337 }
1338
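    /// Returns the language registry assigned to this buffer, if any.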
1339 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1340 self.syntax_map.lock().language_registry()
1341 }
1342
1343 /// Assign the line ending type to the buffer.
1344 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1345 self.text.set_line_ending(line_ending);
1346
1347 let lamport_timestamp = self.text.lamport_clock.tick();
1348 self.send_operation(
1349 Operation::UpdateLineEnding {
1350 line_ending,
1351 lamport_timestamp,
1352 },
1353 true,
1354 cx,
1355 );
1356 }
1357
1358 /// Assign the buffer a new [`Capability`].
1359 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1360 if self.capability != capability {
1361 self.capability = capability;
1362 cx.emit(BufferEvent::CapabilityChanged)
1363 }
1364 }
1365
1366 /// This method is called to signal that the buffer has been saved.
1367 pub fn did_save(
1368 &mut self,
1369 version: clock::Global,
1370 mtime: Option<MTime>,
1371 cx: &mut Context<Self>,
1372 ) {
1373 self.saved_version = version.clone();
1374 self.has_unsaved_edits.set((version, false));
1375 self.has_conflict = false;
1376 self.saved_mtime = mtime;
1377 self.was_changed();
1378 cx.emit(BufferEvent::Saved);
1379 cx.notify();
1380 }
1381
1382 /// Reloads the contents of the buffer from disk.
1383 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1384 let (tx, rx) = futures::channel::oneshot::channel();
1385 let prev_version = self.text.version();
1386 self.reload_task = Some(cx.spawn(async move |this, cx| {
1387 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1388 let file = this.file.as_ref()?.as_local()?;
1389
1390 Some((file.disk_state().mtime(), file.load(cx)))
1391 })?
1392 else {
1393 return Ok(());
1394 };
1395
1396 let new_text = new_text.await?;
1397 let diff = this
1398 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1399 .await;
1400 this.update(cx, |this, cx| {
1401 if this.version() == diff.base_version {
1402 this.finalize_last_transaction();
1403 this.apply_diff(diff, cx);
1404 tx.send(this.finalize_last_transaction().cloned()).ok();
1405 this.has_conflict = false;
1406 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1407 } else {
1408 if !diff.edits.is_empty()
1409 || this
1410 .edits_since::<usize>(&diff.base_version)
1411 .next()
1412 .is_some()
1413 {
1414 this.has_conflict = true;
1415 }
1416
1417 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1418 }
1419
1420 this.reload_task.take();
1421 })
1422 }));
1423 rx
1424 }
1425
1426 /// This method is called to signal that the buffer has been reloaded.
1427 pub fn did_reload(
1428 &mut self,
1429 version: clock::Global,
1430 line_ending: LineEnding,
1431 mtime: Option<MTime>,
1432 cx: &mut Context<Self>,
1433 ) {
1434 self.saved_version = version;
1435 self.has_unsaved_edits
1436 .set((self.saved_version.clone(), false));
1437 self.text.set_line_ending(line_ending);
1438 self.saved_mtime = mtime;
1439 cx.emit(BufferEvent::Reloaded);
1440 cx.notify();
1441 }
1442
1443 /// Updates the [`File`] backing this buffer. This should be called when
1444 /// the file has changed or has been deleted.
1445 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1446 let was_dirty = self.is_dirty();
1447 let mut file_changed = false;
1448
1449 if let Some(old_file) = self.file.as_ref() {
1450 if new_file.path() != old_file.path() {
1451 file_changed = true;
1452 }
1453
1454 let old_state = old_file.disk_state();
1455 let new_state = new_file.disk_state();
1456 if old_state != new_state {
1457 file_changed = true;
1458 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1459 cx.emit(BufferEvent::ReloadNeeded)
1460 }
1461 }
1462 } else {
1463 file_changed = true;
1464 };
1465
1466 self.file = Some(new_file);
1467 if file_changed {
1468 self.was_changed();
1469 self.non_text_state_update_count += 1;
1470 if was_dirty != self.is_dirty() {
1471 cx.emit(BufferEvent::DirtyChanged);
1472 }
1473 cx.emit(BufferEvent::FileHandleChanged);
1474 cx.notify();
1475 }
1476 }
1477
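    /// Returns the base buffer, if this buffer is a branch of another buffer.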
1478 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1479 Some(self.branch_state.as_ref()?.base_buffer.clone())
1480 }
1481
1482 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1483 pub fn language(&self) -> Option<&Arc<Language>> {
1484 self.language.as_ref()
1485 }
1486
1487 /// Returns the [`Language`] at the given location.
1488 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1489 let offset = position.to_offset(self);
1490 let mut is_first = true;
1491 let start_anchor = self.anchor_before(offset);
1492 let end_anchor = self.anchor_after(offset);
1493 self.syntax_map
1494 .lock()
1495 .layers_for_range(offset..offset, &self.text, false)
1496 .filter(|layer| {
1497 if is_first {
1498 is_first = false;
1499 return true;
1500 }
1501
1502 layer
1503 .included_sub_ranges
1504 .map(|sub_ranges| {
1505 sub_ranges.iter().any(|sub_range| {
1506 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1507 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1508 !is_before_start && !is_after_end
1509 })
1510 })
1511 .unwrap_or(true)
1512 })
1513 .last()
1514 .map(|info| info.language.clone())
1515 .or_else(|| self.language.clone())
1516 }
1517
1518 /// Returns each [`Language`] for the active syntax layers at the given location.
1519 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1520 let offset = position.to_offset(self);
1521 let mut languages: Vec<Arc<Language>> = self
1522 .syntax_map
1523 .lock()
1524 .layers_for_range(offset..offset, &self.text, false)
1525 .map(|info| info.language.clone())
1526 .collect();
1527
1528 if languages.is_empty()
1529 && let Some(buffer_language) = self.language()
1530 {
1531 languages.push(buffer_language.clone());
1532 }
1533
1534 languages
1535 }
1536
1537 /// An integer version number that accounts for all updates besides
1538 /// the buffer's text itself (which is versioned via a version vector).
1539 pub fn non_text_state_update_count(&self) -> usize {
1540 self.non_text_state_update_count
1541 }
1542
1543 /// Whether the buffer is being parsed in the background.
1544 #[cfg(any(test, feature = "test-support"))]
1545 pub fn is_parsing(&self) -> bool {
1546 self.reparse.is_some()
1547 }
1548
1549 /// Indicates whether the buffer contains any regions that may be
1550 /// written in a language that hasn't been loaded yet.
1551 pub fn contains_unknown_injections(&self) -> bool {
1552 self.syntax_map.lock().contains_unknown_injections()
1553 }
1554
1555 #[cfg(any(test, feature = "test-support"))]
1556 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1557 self.sync_parse_timeout = timeout;
1558 }
1559
1560 /// Called after an edit to synchronize the buffer's main parse tree with
1561 /// the buffer's new underlying state.
1562 ///
1563 /// Locks the syntax map and interpolates the edits since the last reparse
1564 /// into the foreground syntax tree.
1565 ///
1566 /// Then takes a stable snapshot of the syntax map before unlocking it.
1567 /// The snapshot with the interpolated edits is sent to a background thread,
1568 /// where we ask Tree-sitter to perform an incremental parse.
1569 ///
1570 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1571 /// waiting on the parse to complete. As soon as it completes, we proceed
1572 /// synchronously, unless a 1ms timeout elapses.
1573 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back
    /// into the main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1583 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1584 if self.reparse.is_some() {
1585 return;
1586 }
1587 let language = if let Some(language) = self.language.clone() {
1588 language
1589 } else {
1590 return;
1591 };
1592
1593 let text = self.text_snapshot();
1594 let parsed_version = self.version();
1595
1596 let mut syntax_map = self.syntax_map.lock();
1597 syntax_map.interpolate(&text);
1598 let language_registry = syntax_map.language_registry();
1599 let mut syntax_snapshot = syntax_map.snapshot();
1600 drop(syntax_map);
1601
1602 let parse_task = cx.background_spawn({
1603 let language = language.clone();
1604 let language_registry = language_registry.clone();
1605 async move {
1606 syntax_snapshot.reparse(&text, language_registry, language);
1607 syntax_snapshot
1608 }
1609 });
1610
1611 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1612 match cx
1613 .background_executor()
1614 .block_with_timeout(self.sync_parse_timeout, parse_task)
1615 {
1616 Ok(new_syntax_snapshot) => {
1617 self.did_finish_parsing(new_syntax_snapshot, cx);
1618 self.reparse = None;
1619 }
1620 Err(parse_task) => {
1621 // todo(lw): hot foreground spawn
1622 self.reparse = Some(cx.spawn(async move |this, cx| {
1623 let new_syntax_map = cx.background_spawn(parse_task).await;
1624 this.update(cx, move |this, cx| {
1625 let grammar_changed = || {
1626 this.language.as_ref().is_none_or(|current_language| {
1627 !Arc::ptr_eq(&language, current_language)
1628 })
1629 };
1630 let language_registry_changed = || {
1631 new_syntax_map.contains_unknown_injections()
1632 && language_registry.is_some_and(|registry| {
1633 registry.version() != new_syntax_map.language_registry_version()
1634 })
1635 };
1636 let parse_again = this.version.changed_since(&parsed_version)
1637 || language_registry_changed()
1638 || grammar_changed();
1639 this.did_finish_parsing(new_syntax_map, cx);
1640 this.reparse = None;
1641 if parse_again {
1642 this.reparse(cx);
1643 }
1644 })
1645 .ok();
1646 }));
1647 }
1648 }
1649 }
1650
1651 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1652 self.was_changed();
1653 self.non_text_state_update_count += 1;
1654 self.syntax_map.lock().did_parse(syntax_snapshot);
1655 self.request_autoindent(cx);
1656 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1657 self.tree_sitter_data.lock().clear();
1658 cx.emit(BufferEvent::Reparsed);
1659 cx.notify();
1660 }
1661
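    /// Returns a receiver that can be observed to learn whether the buffer is currently parsing.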
1662 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1663 self.parse_status.1.clone()
1664 }
1665
    /// Wait until the buffer is no longer parsing.
1667 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1668 let mut parse_status = self.parse_status();
1669 async move {
1670 while *parse_status.borrow() != ParseStatus::Idle {
1671 if parse_status.changed().await.is_err() {
1672 break;
1673 }
1674 }
1675 }
1676 }
1677
1678 /// Assign to the buffer a set of diagnostics created by a given language server.
1679 pub fn update_diagnostics(
1680 &mut self,
1681 server_id: LanguageServerId,
1682 diagnostics: DiagnosticSet,
1683 cx: &mut Context<Self>,
1684 ) {
1685 let lamport_timestamp = self.text.lamport_clock.tick();
1686 let op = Operation::UpdateDiagnostics {
1687 server_id,
1688 diagnostics: diagnostics.iter().cloned().collect(),
1689 lamport_timestamp,
1690 };
1691
1692 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1693 self.send_operation(op, true, cx);
1694 }
1695
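    /// Returns the diagnostics stored for the given language server, or for all
    /// language servers when `for_server` is `None`.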
1696 pub fn buffer_diagnostics(
1697 &self,
1698 for_server: Option<LanguageServerId>,
1699 ) -> Vec<&DiagnosticEntry<Anchor>> {
1700 match for_server {
1701 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1702 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1703 Err(_) => Vec::new(),
1704 },
1705 None => self
1706 .diagnostics
1707 .iter()
1708 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1709 .collect(),
1710 }
1711 }
1712
1713 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1714 if let Some(indent_sizes) = self.compute_autoindents() {
1715 let indent_sizes = cx.background_spawn(indent_sizes);
1716 match cx
1717 .background_executor()
1718 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1719 {
1720 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1721 Err(indent_sizes) => {
1722 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1723 let indent_sizes = indent_sizes.await;
1724 this.update(cx, |this, cx| {
1725 this.apply_autoindents(indent_sizes, cx);
1726 })
1727 .ok();
1728 }));
1729 }
1730 }
1731 } else {
1732 self.autoindent_requests.clear();
1733 for tx in self.wait_for_autoindent_txs.drain(..) {
1734 tx.send(()).ok();
1735 }
1736 }
1737 }
1738
1739 fn compute_autoindents(
1740 &self,
1741 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1742 let max_rows_between_yields = 100;
1743 let snapshot = self.snapshot();
1744 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1745 return None;
1746 }
1747
1748 let autoindent_requests = self.autoindent_requests.clone();
1749 Some(async move {
1750 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1751 for request in autoindent_requests {
1752 // Resolve each edited range to its row in the current buffer and in the
1753 // buffer before this batch of edits.
1754 let mut row_ranges = Vec::new();
1755 let mut old_to_new_rows = BTreeMap::new();
1756 let mut language_indent_sizes_by_new_row = Vec::new();
1757 for entry in &request.entries {
1758 let position = entry.range.start;
1759 let new_row = position.to_point(&snapshot).row;
1760 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1761 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1762
1763 if !entry.first_line_is_new {
1764 let old_row = position.to_point(&request.before_edit).row;
1765 old_to_new_rows.insert(old_row, new_row);
1766 }
1767 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1768 }
1769
1770 // Build a map containing the suggested indentation for each of the edited lines
1771 // with respect to the state of the buffer before these edits. This map is keyed
1772 // by the rows for these lines in the current state of the buffer.
1773 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1774 let old_edited_ranges =
1775 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1776 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1777 let mut language_indent_size = IndentSize::default();
1778 for old_edited_range in old_edited_ranges {
1779 let suggestions = request
1780 .before_edit
1781 .suggest_autoindents(old_edited_range.clone())
1782 .into_iter()
1783 .flatten();
1784 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1785 if let Some(suggestion) = suggestion {
1786 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1787
1788 // Find the indent size based on the language for this row.
1789 while let Some((row, size)) = language_indent_sizes.peek() {
1790 if *row > new_row {
1791 break;
1792 }
1793 language_indent_size = *size;
1794 language_indent_sizes.next();
1795 }
1796
1797 let suggested_indent = old_to_new_rows
1798 .get(&suggestion.basis_row)
1799 .and_then(|from_row| {
1800 Some(old_suggestions.get(from_row).copied()?.0)
1801 })
1802 .unwrap_or_else(|| {
1803 request
1804 .before_edit
1805 .indent_size_for_line(suggestion.basis_row)
1806 })
1807 .with_delta(suggestion.delta, language_indent_size);
1808 old_suggestions
1809 .insert(new_row, (suggested_indent, suggestion.within_error));
1810 }
1811 }
1812 yield_now().await;
1813 }
1814
1815 // Compute new suggestions for each line, but only include them in the result
1816 // if they differ from the old suggestion for that line.
1817 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1818 let mut language_indent_size = IndentSize::default();
1819 for (row_range, original_indent_column) in row_ranges {
1820 let new_edited_row_range = if request.is_block_mode {
1821 row_range.start..row_range.start + 1
1822 } else {
1823 row_range.clone()
1824 };
1825
1826 let suggestions = snapshot
1827 .suggest_autoindents(new_edited_row_range.clone())
1828 .into_iter()
1829 .flatten();
1830 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1831 if let Some(suggestion) = suggestion {
1832 // Find the indent size based on the language for this row.
1833 while let Some((row, size)) = language_indent_sizes.peek() {
1834 if *row > new_row {
1835 break;
1836 }
1837 language_indent_size = *size;
1838 language_indent_sizes.next();
1839 }
1840
1841 let suggested_indent = indent_sizes
1842 .get(&suggestion.basis_row)
1843 .copied()
1844 .map(|e| e.0)
1845 .unwrap_or_else(|| {
1846 snapshot.indent_size_for_line(suggestion.basis_row)
1847 })
1848 .with_delta(suggestion.delta, language_indent_size);
1849
1850 if old_suggestions.get(&new_row).is_none_or(
1851 |(old_indentation, was_within_error)| {
1852 suggested_indent != *old_indentation
1853 && (!suggestion.within_error || *was_within_error)
1854 },
1855 ) {
1856 indent_sizes.insert(
1857 new_row,
1858 (suggested_indent, request.ignore_empty_lines),
1859 );
1860 }
1861 }
1862 }
1863
1864 if let (true, Some(original_indent_column)) =
1865 (request.is_block_mode, original_indent_column)
1866 {
1867 let new_indent =
1868 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1869 *indent
1870 } else {
1871 snapshot.indent_size_for_line(row_range.start)
1872 };
1873 let delta = new_indent.len as i64 - original_indent_column as i64;
1874 if delta != 0 {
1875 for row in row_range.skip(1) {
1876 indent_sizes.entry(row).or_insert_with(|| {
1877 let mut size = snapshot.indent_size_for_line(row);
1878 if size.kind == new_indent.kind {
1879 match delta.cmp(&0) {
1880 Ordering::Greater => size.len += delta as u32,
1881 Ordering::Less => {
1882 size.len = size.len.saturating_sub(-delta as u32)
1883 }
1884 Ordering::Equal => {}
1885 }
1886 }
1887 (size, request.ignore_empty_lines)
1888 });
1889 }
1890 }
1891 }
1892
1893 yield_now().await;
1894 }
1895 }
1896
1897 indent_sizes
1898 .into_iter()
1899 .filter_map(|(row, (indent, ignore_empty_lines))| {
1900 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1901 None
1902 } else {
1903 Some((row, indent))
1904 }
1905 })
1906 .collect()
1907 })
1908 }
1909
1910 fn apply_autoindents(
1911 &mut self,
1912 indent_sizes: BTreeMap<u32, IndentSize>,
1913 cx: &mut Context<Self>,
1914 ) {
1915 self.autoindent_requests.clear();
1916 for tx in self.wait_for_autoindent_txs.drain(..) {
1917 tx.send(()).ok();
1918 }
1919
1920 let edits: Vec<_> = indent_sizes
1921 .into_iter()
1922 .filter_map(|(row, indent_size)| {
1923 let current_size = indent_size_for_line(self, row);
1924 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1925 })
1926 .collect();
1927
1928 let preserve_preview = self.preserve_preview();
1929 self.edit(edits, None, cx);
1930 if preserve_preview {
1931 self.refresh_preview();
1932 }
1933 }
1934
1935 /// Create a minimal edit that will cause the given row to be indented
1936 /// with the given size. After applying this edit, the length of the line
1937 /// will always be at least `new_size.len`.
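    ///
    /// A small sketch of the expected shape of the result (not compiled as a
    /// doctest): growing a 2-space indent to 4 spaces on row 3 produces an
    /// insertion of two spaces at the start of that line.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```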
1938 pub fn edit_for_indent_size_adjustment(
1939 row: u32,
1940 current_size: IndentSize,
1941 new_size: IndentSize,
1942 ) -> Option<(Range<Point>, String)> {
1943 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1945 Ordering::Greater => {
1946 let point = Point::new(row, 0);
1947 Some((
1948 point..point,
1949 iter::repeat(new_size.char())
1950 .take((new_size.len - current_size.len) as usize)
1951 .collect::<String>(),
1952 ))
1953 }
1954
1955 Ordering::Less => Some((
1956 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1957 String::new(),
1958 )),
1959
1960 Ordering::Equal => None,
1961 }
1962 } else {
1963 Some((
1964 Point::new(row, 0)..Point::new(row, current_size.len),
1965 iter::repeat(new_size.char())
1966 .take(new_size.len as usize)
1967 .collect::<String>(),
1968 ))
1969 }
1970 }
1971
1972 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1973 /// and the given new text.
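    ///
    /// A rough sketch of how the returned task can be combined with
    /// [`Buffer::apply_diff`] (not compiled as a doctest; the helper name and
    /// the surrounding `Context<Buffer>` are assumptions, mirroring how the
    /// parse and autoindent tasks above are driven):
    ///
    /// ```ignore
    /// fn replace_contents(buffer: &mut Buffer, new_text: String, cx: &mut Context<Buffer>) {
    ///     // Compute the diff on the background executor...
    ///     let diff = buffer.diff(new_text, cx);
    ///     cx.spawn(async move |this, cx| {
    ///         let diff = diff.await;
    ///         // ...then apply it once it resolves, adjusting for any
    ///         // edits that happened in the meantime.
    ///         this.update(cx, |buffer, cx| {
    ///             buffer.apply_diff(diff, cx);
    ///         })
    ///         .ok();
    ///     })
    ///     .detach();
    /// }
    /// ```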
1974 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1975 let old_text = self.as_rope().clone();
1976 let base_version = self.version();
1977 cx.background_executor()
1978 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1979 let old_text = old_text.to_string();
1980 let line_ending = LineEnding::detect(&new_text);
1981 LineEnding::normalize(&mut new_text);
1982 let edits = text_diff(&old_text, &new_text);
1983 Diff {
1984 base_version,
1985 line_ending,
1986 edits,
1987 }
1988 })
1989 }
1990
1991 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1993 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1994 let old_text = self.as_rope().clone();
1995 let line_ending = self.line_ending();
1996 let base_version = self.version();
1997 cx.background_spawn(async move {
1998 let ranges = trailing_whitespace_ranges(&old_text);
1999 let empty = Arc::<str>::from("");
2000 Diff {
2001 base_version,
2002 line_ending,
2003 edits: ranges
2004 .into_iter()
2005 .map(|range| (range, empty.clone()))
2006 .collect(),
2007 }
2008 })
2009 }
2010
2011 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
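    ///
    /// Sketch (not compiled as a doctest; `buffer` and `cx` are assumed to be
    /// a `Buffer` and its `Context`, obtained elsewhere):
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n\n  ", cx);
    /// buffer.ensure_final_newline(cx);
    /// // The trailing blank line and spaces collapse into a single newline.
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```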
2013 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2014 let len = self.len();
2015 if len == 0 {
2016 return;
2017 }
2018 let mut offset = len;
2019 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2020 let non_whitespace_len = chunk
2021 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2022 .len();
2023 offset -= chunk.len();
2024 offset += non_whitespace_len;
2025 if non_whitespace_len != 0 {
2026 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2027 return;
2028 }
2029 break;
2030 }
2031 }
2032 self.edit([(offset..len, "\n")], None, cx);
2033 }
2034
2035 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2036 /// calculated, then adjust the diff to account for those changes, and discard any
2037 /// parts of the diff that conflict with those changes.
2038 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2039 let snapshot = self.snapshot();
2040 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2041 let mut delta = 0;
2042 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2043 while let Some(edit_since) = edits_since.peek() {
2044 // If the edit occurs after a diff hunk, then it does not
2045 // affect that hunk.
2046 if edit_since.old.start > range.end {
2047 break;
2048 }
2049 // If the edit precedes the diff hunk, then adjust the hunk
2050 // to reflect the edit.
2051 else if edit_since.old.end < range.start {
2052 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2053 edits_since.next();
2054 }
2055 // If the edit intersects a diff hunk, then discard that hunk.
2056 else {
2057 return None;
2058 }
2059 }
2060
2061 let start = (range.start as i64 + delta) as usize;
2062 let end = (range.end as i64 + delta) as usize;
2063 Some((start..end, new_text))
2064 });
2065
2066 self.start_transaction();
2067 self.text.set_line_ending(diff.line_ending);
2068 self.edit(adjusted_edits, None, cx);
2069 self.end_transaction(cx)
2070 }
2071
2072 pub fn has_unsaved_edits(&self) -> bool {
2073 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2074
2075 if last_version == self.version {
2076 self.has_unsaved_edits
2077 .set((last_version, has_unsaved_edits));
2078 return has_unsaved_edits;
2079 }
2080
2081 let has_edits = self.has_edits_since(&self.saved_version);
2082 self.has_unsaved_edits
2083 .set((self.version.clone(), has_edits));
2084 has_edits
2085 }
2086
2087 /// Checks if the buffer has unsaved changes.
2088 pub fn is_dirty(&self) -> bool {
2089 if self.capability == Capability::ReadOnly {
2090 return false;
2091 }
2092 if self.has_conflict {
2093 return true;
2094 }
2095 match self.file.as_ref().map(|f| f.disk_state()) {
2096 Some(DiskState::New) | Some(DiskState::Deleted) => {
2097 !self.is_empty() && self.has_unsaved_edits()
2098 }
2099 _ => self.has_unsaved_edits(),
2100 }
2101 }
2102
2103 /// Checks if the buffer and its file have both changed since the buffer
2104 /// was last saved or reloaded.
2105 pub fn has_conflict(&self) -> bool {
2106 if self.has_conflict {
2107 return true;
2108 }
2109 let Some(file) = self.file.as_ref() else {
2110 return false;
2111 };
2112 match file.disk_state() {
2113 DiskState::New => false,
2114 DiskState::Present { mtime } => match self.saved_mtime {
2115 Some(saved_mtime) => {
2116 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2117 }
2118 None => true,
2119 },
2120 DiskState::Deleted => false,
2121 }
2122 }
2123
2124 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2125 pub fn subscribe(&mut self) -> Subscription {
2126 self.text.subscribe()
2127 }
2128
2129 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2130 ///
2131 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
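    ///
    /// A minimal sketch (not compiled as a doctest; `buffer` and `cx` are
    /// assumed to be a `Buffer` and its `Context`):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// // The bit is set synchronously as part of applying the edit.
    /// assert!(changed.get());
    /// ```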
2133 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2134 if let Err(ix) = self
2135 .change_bits
2136 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2137 {
2138 self.change_bits.insert(ix, bit);
2139 }
2140 }
2141
2142 /// Set the change bit for all "listeners".
2143 fn was_changed(&mut self) {
2144 self.change_bits.retain(|change_bit| {
2145 change_bit
2146 .upgrade()
2147 .inspect(|bit| {
2148 _ = bit.replace(true);
2149 })
2150 .is_some()
2151 });
2152 }
2153
2154 /// Starts a transaction, if one is not already in-progress. When undoing or
2155 /// redoing edits, all of the edits performed within a transaction are undone
2156 /// or redone together.
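    ///
    /// A short sketch (not compiled as a doctest; `buffer` and `cx` are
    /// assumed): both edits below are grouped, so a single undo reverts them
    /// together.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// assert!(transaction_id.is_some());
    /// ```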
2157 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2158 self.start_transaction_at(Instant::now())
2159 }
2160
2161 /// Starts a transaction, providing the current time. Subsequent transactions
2162 /// that occur within a short period of time will be grouped together. This
2163 /// is controlled by the buffer's undo grouping duration.
2164 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2165 self.transaction_depth += 1;
2166 if self.was_dirty_before_starting_transaction.is_none() {
2167 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2168 }
2169 self.text.start_transaction_at(now)
2170 }
2171
2172 /// Terminates the current transaction, if this is the outermost transaction.
2173 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2174 self.end_transaction_at(Instant::now(), cx)
2175 }
2176
2177 /// Terminates the current transaction, providing the current time. Subsequent transactions
2178 /// that occur within a short period of time will be grouped together. This
2179 /// is controlled by the buffer's undo grouping duration.
2180 pub fn end_transaction_at(
2181 &mut self,
2182 now: Instant,
2183 cx: &mut Context<Self>,
2184 ) -> Option<TransactionId> {
2185 assert!(self.transaction_depth > 0);
2186 self.transaction_depth -= 1;
2187 let was_dirty = if self.transaction_depth == 0 {
2188 self.was_dirty_before_starting_transaction.take().unwrap()
2189 } else {
2190 false
2191 };
2192 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2193 self.did_edit(&start_version, was_dirty, cx);
2194 Some(transaction_id)
2195 } else {
2196 None
2197 }
2198 }
2199
2200 /// Manually add a transaction to the buffer's undo history.
2201 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2202 self.text.push_transaction(transaction, now);
2203 }
2204
2205 /// Differs from `push_transaction` in that it does not clear the redo
2206 /// stack. Intended to be used to create a parent transaction to merge
2207 /// potential child transactions into.
2208 ///
2209 /// The caller is responsible for removing it from the undo history using
2210 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2211 /// are merged into this transaction, the caller is responsible for ensuring
2212 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2213 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
2216 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2217 self.text.push_empty_transaction(now)
2218 }
2219
2220 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2222 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2223 self.text.finalize_last_transaction()
2224 }
2225
2226 /// Manually group all changes since a given transaction.
2227 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2228 self.text.group_until_transaction(transaction_id);
2229 }
2230
2231 /// Manually remove a transaction from the buffer's undo history
2232 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2233 self.text.forget_transaction(transaction_id)
2234 }
2235
2236 /// Retrieve a transaction from the buffer's undo history
2237 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2238 self.text.get_transaction(transaction_id)
2239 }
2240
2241 /// Manually merge two transactions in the buffer's undo history.
2242 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2243 self.text.merge_transactions(transaction, destination);
2244 }
2245
2246 /// Waits for the buffer to receive operations with the given timestamps.
2247 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2248 &mut self,
2249 edit_ids: It,
2250 ) -> impl Future<Output = Result<()>> + use<It> {
2251 self.text.wait_for_edits(edit_ids)
2252 }
2253
2254 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2255 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2256 &mut self,
2257 anchors: It,
2258 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2259 self.text.wait_for_anchors(anchors)
2260 }
2261
2262 /// Waits for the buffer to receive operations up to the given version.
2263 pub fn wait_for_version(
2264 &mut self,
2265 version: clock::Global,
2266 ) -> impl Future<Output = Result<()>> + use<> {
2267 self.text.wait_for_version(version)
2268 }
2269
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2272 pub fn give_up_waiting(&mut self) {
2273 self.text.give_up_waiting();
2274 }
2275
2276 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2277 let mut rx = None;
2278 if !self.autoindent_requests.is_empty() {
2279 let channel = oneshot::channel();
2280 self.wait_for_autoindent_txs.push(channel.0);
2281 rx = Some(channel.1);
2282 }
2283 rx
2284 }
2285
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2287 pub fn set_active_selections(
2288 &mut self,
2289 selections: Arc<[Selection<Anchor>]>,
2290 line_mode: bool,
2291 cursor_shape: CursorShape,
2292 cx: &mut Context<Self>,
2293 ) {
2294 let lamport_timestamp = self.text.lamport_clock.tick();
2295 self.remote_selections.insert(
2296 self.text.replica_id(),
2297 SelectionSet {
2298 selections: selections.clone(),
2299 lamport_timestamp,
2300 line_mode,
2301 cursor_shape,
2302 },
2303 );
2304 self.send_operation(
2305 Operation::UpdateSelections {
2306 selections,
2307 line_mode,
2308 lamport_timestamp,
2309 cursor_shape,
2310 },
2311 true,
2312 cx,
2313 );
2314 self.non_text_state_update_count += 1;
2315 cx.notify();
2316 }
2317
2318 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2319 /// this replica.
2320 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2321 if self
2322 .remote_selections
2323 .get(&self.text.replica_id())
2324 .is_none_or(|set| !set.selections.is_empty())
2325 {
2326 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2327 }
2328 }
2329
2330 pub fn set_agent_selections(
2331 &mut self,
2332 selections: Arc<[Selection<Anchor>]>,
2333 line_mode: bool,
2334 cursor_shape: CursorShape,
2335 cx: &mut Context<Self>,
2336 ) {
2337 let lamport_timestamp = self.text.lamport_clock.tick();
2338 self.remote_selections.insert(
2339 ReplicaId::AGENT,
2340 SelectionSet {
2341 selections,
2342 lamport_timestamp,
2343 line_mode,
2344 cursor_shape,
2345 },
2346 );
2347 self.non_text_state_update_count += 1;
2348 cx.notify();
2349 }
2350
2351 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2352 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2353 }
2354
2355 /// Replaces the buffer's entire text.
2356 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2357 where
2358 T: Into<Arc<str>>,
2359 {
2360 self.autoindent_requests.clear();
2361 self.edit([(0..self.len(), text)], None, cx)
2362 }
2363
2364 /// Appends the given text to the end of the buffer.
2365 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2366 where
2367 T: Into<Arc<str>>,
2368 {
2369 self.edit([(self.len()..self.len(), text)], None, cx)
2370 }
2371
2372 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2373 /// delete, and a string of text to insert at that location.
2374 ///
2375 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2376 /// request for the edited ranges, which will be processed when the buffer finishes
2377 /// parsing.
2378 ///
2379 /// Parsing takes place at the end of a transaction, and may compute synchronously
2380 /// or asynchronously, depending on the changes.
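    ///
    /// A usage sketch (not compiled as a doctest; `buffer` and `cx` are
    /// assumed). All offsets refer to the buffer state before any of the
    /// edits are applied:
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         (0..0, "// prologue\n"), // insert at the start
    ///         (4..9, "renamed"),       // replace an existing range
    ///     ],
    ///     Some(AutoindentMode::EachLine), // request auto-indent for the edited rows
    ///     cx,
    /// );
    /// ```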
2381 pub fn edit<I, S, T>(
2382 &mut self,
2383 edits_iter: I,
2384 autoindent_mode: Option<AutoindentMode>,
2385 cx: &mut Context<Self>,
2386 ) -> Option<clock::Lamport>
2387 where
2388 I: IntoIterator<Item = (Range<S>, T)>,
2389 S: ToOffset,
2390 T: Into<Arc<str>>,
2391 {
2392 // Skip invalid edits and coalesce contiguous ones.
2393 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2394
2395 for (range, new_text) in edits_iter {
2396 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2397
2398 if range.start > range.end {
2399 mem::swap(&mut range.start, &mut range.end);
2400 }
2401 let new_text = new_text.into();
2402 if !new_text.is_empty() || !range.is_empty() {
2403 if let Some((prev_range, prev_text)) = edits.last_mut()
2404 && prev_range.end >= range.start
2405 {
2406 prev_range.end = cmp::max(prev_range.end, range.end);
2407 *prev_text = format!("{prev_text}{new_text}").into();
2408 } else {
2409 edits.push((range, new_text));
2410 }
2411 }
2412 }
2413 if edits.is_empty() {
2414 return None;
2415 }
2416
2417 self.start_transaction();
2418 self.pending_autoindent.take();
2419 let autoindent_request = autoindent_mode
2420 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2421
2422 let edit_operation = self.text.edit(edits.iter().cloned());
2423 let edit_id = edit_operation.timestamp();
2424
2425 if let Some((before_edit, mode)) = autoindent_request {
2426 let mut delta = 0isize;
2427 let mut previous_setting = None;
2428 let entries: Vec<_> = edits
2429 .into_iter()
2430 .enumerate()
2431 .zip(&edit_operation.as_edit().unwrap().new_text)
2432 .filter(|((_, (range, _)), _)| {
2433 let language = before_edit.language_at(range.start);
2434 let language_id = language.map(|l| l.id());
2435 if let Some((cached_language_id, auto_indent)) = previous_setting
2436 && cached_language_id == language_id
2437 {
2438 auto_indent
2439 } else {
2440 // The auto-indent setting is not present in editorconfigs, hence
2441 // we can avoid passing the file here.
2442 let auto_indent =
2443 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2444 previous_setting = Some((language_id, auto_indent));
2445 auto_indent
2446 }
2447 })
2448 .map(|((ix, (range, _)), new_text)| {
2449 let new_text_length = new_text.len();
2450 let old_start = range.start.to_point(&before_edit);
2451 let new_start = (delta + range.start as isize) as usize;
2452 let range_len = range.end - range.start;
2453 delta += new_text_length as isize - range_len as isize;
2454
2455 // Decide what range of the insertion to auto-indent, and whether
2456 // the first line of the insertion should be considered a newly-inserted line
2457 // or an edit to an existing line.
2458 let mut range_of_insertion_to_indent = 0..new_text_length;
2459 let mut first_line_is_new = true;
2460
2461 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2462 let old_line_end = before_edit.line_len(old_start.row);
2463
2464 if old_start.column > old_line_start {
2465 first_line_is_new = false;
2466 }
2467
2468 if !new_text.contains('\n')
2469 && (old_start.column + (range_len as u32) < old_line_end
2470 || old_line_end == old_line_start)
2471 {
2472 first_line_is_new = false;
2473 }
2474
2475 // When inserting text starting with a newline, avoid auto-indenting the
2476 // previous line.
2477 if new_text.starts_with('\n') {
2478 range_of_insertion_to_indent.start += 1;
2479 first_line_is_new = true;
2480 }
2481
2482 let mut original_indent_column = None;
2483 if let AutoindentMode::Block {
2484 original_indent_columns,
2485 } = &mode
2486 {
2487 original_indent_column = Some(if new_text.starts_with('\n') {
2488 indent_size_for_text(
2489 new_text[range_of_insertion_to_indent.clone()].chars(),
2490 )
2491 .len
2492 } else {
2493 original_indent_columns
2494 .get(ix)
2495 .copied()
2496 .flatten()
2497 .unwrap_or_else(|| {
2498 indent_size_for_text(
2499 new_text[range_of_insertion_to_indent.clone()].chars(),
2500 )
2501 .len
2502 })
2503 });
2504
2505 // Avoid auto-indenting the line after the edit.
2506 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2507 range_of_insertion_to_indent.end -= 1;
2508 }
2509 }
2510
2511 AutoindentRequestEntry {
2512 first_line_is_new,
2513 original_indent_column,
2514 indent_size: before_edit.language_indent_size_at(range.start, cx),
2515 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2516 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2517 }
2518 })
2519 .collect();
2520
2521 if !entries.is_empty() {
2522 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2523 before_edit,
2524 entries,
2525 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2526 ignore_empty_lines: false,
2527 }));
2528 }
2529 }
2530
2531 self.end_transaction(cx);
2532 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2533 Some(edit_id)
2534 }
2535
2536 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2537 self.was_changed();
2538
2539 if self.edits_since::<usize>(old_version).next().is_none() {
2540 return;
2541 }
2542
2543 self.reparse(cx);
2544 cx.emit(BufferEvent::Edited);
2545 if was_dirty != self.is_dirty() {
2546 cx.emit(BufferEvent::DirtyChanged);
2547 }
2548 cx.notify();
2549 }
2550
2551 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2552 where
2553 I: IntoIterator<Item = Range<T>>,
2554 T: ToOffset + Copy,
2555 {
2556 let before_edit = self.snapshot();
2557 let entries = ranges
2558 .into_iter()
2559 .map(|range| AutoindentRequestEntry {
2560 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2561 first_line_is_new: true,
2562 indent_size: before_edit.language_indent_size_at(range.start, cx),
2563 original_indent_column: None,
2564 })
2565 .collect();
2566 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2567 before_edit,
2568 entries,
2569 is_block_mode: false,
2570 ignore_empty_lines: true,
2571 }));
2572 self.request_autoindent(cx);
2573 }
2574
2575 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2576 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2577 pub fn insert_empty_line(
2578 &mut self,
2579 position: impl ToPoint,
2580 space_above: bool,
2581 space_below: bool,
2582 cx: &mut Context<Self>,
2583 ) -> Point {
2584 let mut position = position.to_point(self);
2585
2586 self.start_transaction();
2587
2588 self.edit(
2589 [(position..position, "\n")],
2590 Some(AutoindentMode::EachLine),
2591 cx,
2592 );
2593
2594 if position.column > 0 {
2595 position += Point::new(1, 0);
2596 }
2597
2598 if !self.is_line_blank(position.row) {
2599 self.edit(
2600 [(position..position, "\n")],
2601 Some(AutoindentMode::EachLine),
2602 cx,
2603 );
2604 }
2605
2606 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2607 self.edit(
2608 [(position..position, "\n")],
2609 Some(AutoindentMode::EachLine),
2610 cx,
2611 );
2612 position.row += 1;
2613 }
2614
2615 if space_below
2616 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2617 {
2618 self.edit(
2619 [(position..position, "\n")],
2620 Some(AutoindentMode::EachLine),
2621 cx,
2622 );
2623 }
2624
2625 self.end_transaction(cx);
2626
2627 position
2628 }
2629
2630 /// Applies the given remote operations to the buffer.
2631 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2632 self.pending_autoindent.take();
2633 let was_dirty = self.is_dirty();
2634 let old_version = self.version.clone();
2635 let mut deferred_ops = Vec::new();
2636 let buffer_ops = ops
2637 .into_iter()
2638 .filter_map(|op| match op {
2639 Operation::Buffer(op) => Some(op),
2640 _ => {
2641 if self.can_apply_op(&op) {
2642 self.apply_op(op, cx);
2643 } else {
2644 deferred_ops.push(op);
2645 }
2646 None
2647 }
2648 })
2649 .collect::<Vec<_>>();
2650 for operation in buffer_ops.iter() {
2651 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2652 }
2653 self.text.apply_ops(buffer_ops);
2654 self.deferred_ops.insert(deferred_ops);
2655 self.flush_deferred_ops(cx);
2656 self.did_edit(&old_version, was_dirty, cx);
2657 // Notify independently of whether the buffer was edited as the operations could include a
2658 // selection update.
2659 cx.notify();
2660 }
2661
2662 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2663 let mut deferred_ops = Vec::new();
2664 for op in self.deferred_ops.drain().iter().cloned() {
2665 if self.can_apply_op(&op) {
2666 self.apply_op(op, cx);
2667 } else {
2668 deferred_ops.push(op);
2669 }
2670 }
2671 self.deferred_ops.insert(deferred_ops);
2672 }
2673
2674 pub fn has_deferred_ops(&self) -> bool {
2675 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2676 }
2677
2678 fn can_apply_op(&self, operation: &Operation) -> bool {
2679 match operation {
2680 Operation::Buffer(_) => {
2681 unreachable!("buffer operations should never be applied at this layer")
2682 }
2683 Operation::UpdateDiagnostics {
2684 diagnostics: diagnostic_set,
2685 ..
2686 } => diagnostic_set.iter().all(|diagnostic| {
2687 self.text.can_resolve(&diagnostic.range.start)
2688 && self.text.can_resolve(&diagnostic.range.end)
2689 }),
2690 Operation::UpdateSelections { selections, .. } => selections
2691 .iter()
2692 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2693 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2694 }
2695 }
2696
2697 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2698 match operation {
2699 Operation::Buffer(_) => {
2700 unreachable!("buffer operations should never be applied at this layer")
2701 }
2702 Operation::UpdateDiagnostics {
2703 server_id,
2704 diagnostics: diagnostic_set,
2705 lamport_timestamp,
2706 } => {
2707 let snapshot = self.snapshot();
2708 self.apply_diagnostic_update(
2709 server_id,
2710 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2711 lamport_timestamp,
2712 cx,
2713 );
2714 }
2715 Operation::UpdateSelections {
2716 selections,
2717 lamport_timestamp,
2718 line_mode,
2719 cursor_shape,
2720 } => {
2721 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2722 && set.lamport_timestamp > lamport_timestamp
2723 {
2724 return;
2725 }
2726
2727 self.remote_selections.insert(
2728 lamport_timestamp.replica_id,
2729 SelectionSet {
2730 selections,
2731 lamport_timestamp,
2732 line_mode,
2733 cursor_shape,
2734 },
2735 );
2736 self.text.lamport_clock.observe(lamport_timestamp);
2737 self.non_text_state_update_count += 1;
2738 }
2739 Operation::UpdateCompletionTriggers {
2740 triggers,
2741 lamport_timestamp,
2742 server_id,
2743 } => {
2744 if triggers.is_empty() {
2745 self.completion_triggers_per_language_server
2746 .remove(&server_id);
2747 self.completion_triggers = self
2748 .completion_triggers_per_language_server
2749 .values()
2750 .flat_map(|triggers| triggers.iter().cloned())
2751 .collect();
2752 } else {
2753 self.completion_triggers_per_language_server
2754 .insert(server_id, triggers.iter().cloned().collect());
2755 self.completion_triggers.extend(triggers);
2756 }
2757 self.text.lamport_clock.observe(lamport_timestamp);
2758 }
2759 Operation::UpdateLineEnding {
2760 line_ending,
2761 lamport_timestamp,
2762 } => {
2763 self.text.set_line_ending(line_ending);
2764 self.text.lamport_clock.observe(lamport_timestamp);
2765 }
2766 }
2767 }
2768
2769 fn apply_diagnostic_update(
2770 &mut self,
2771 server_id: LanguageServerId,
2772 diagnostics: DiagnosticSet,
2773 lamport_timestamp: clock::Lamport,
2774 cx: &mut Context<Self>,
2775 ) {
2776 if lamport_timestamp > self.diagnostics_timestamp {
2777 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2778 if diagnostics.is_empty() {
2779 if let Ok(ix) = ix {
2780 self.diagnostics.remove(ix);
2781 }
2782 } else {
2783 match ix {
2784 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2785 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2786 };
2787 }
2788 self.diagnostics_timestamp = lamport_timestamp;
2789 self.non_text_state_update_count += 1;
2790 self.text.lamport_clock.observe(lamport_timestamp);
2791 cx.notify();
2792 cx.emit(BufferEvent::DiagnosticsUpdated);
2793 }
2794 }
2795
2796 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2797 self.was_changed();
2798 cx.emit(BufferEvent::Operation {
2799 operation,
2800 is_local,
2801 });
2802 }
2803
2804 /// Removes the selections for a given peer.
2805 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2806 self.remote_selections.remove(&replica_id);
2807 cx.notify();
2808 }
2809
2810 /// Undoes the most recent transaction.
2811 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2812 let was_dirty = self.is_dirty();
2813 let old_version = self.version.clone();
2814
2815 if let Some((transaction_id, operation)) = self.text.undo() {
2816 self.send_operation(Operation::Buffer(operation), true, cx);
2817 self.did_edit(&old_version, was_dirty, cx);
2818 Some(transaction_id)
2819 } else {
2820 None
2821 }
2822 }
2823
2824 /// Manually undoes a specific transaction in the buffer's undo history.
2825 pub fn undo_transaction(
2826 &mut self,
2827 transaction_id: TransactionId,
2828 cx: &mut Context<Self>,
2829 ) -> bool {
2830 let was_dirty = self.is_dirty();
2831 let old_version = self.version.clone();
2832 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2833 self.send_operation(Operation::Buffer(operation), true, cx);
2834 self.did_edit(&old_version, was_dirty, cx);
2835 true
2836 } else {
2837 false
2838 }
2839 }
2840
2841 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2842 pub fn undo_to_transaction(
2843 &mut self,
2844 transaction_id: TransactionId,
2845 cx: &mut Context<Self>,
2846 ) -> bool {
2847 let was_dirty = self.is_dirty();
2848 let old_version = self.version.clone();
2849
2850 let operations = self.text.undo_to_transaction(transaction_id);
2851 let undone = !operations.is_empty();
2852 for operation in operations {
2853 self.send_operation(Operation::Buffer(operation), true, cx);
2854 }
2855 if undone {
2856 self.did_edit(&old_version, was_dirty, cx)
2857 }
2858 undone
2859 }
2860
2861 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2862 let was_dirty = self.is_dirty();
2863 let operation = self.text.undo_operations(counts);
2864 let old_version = self.version.clone();
2865 self.send_operation(Operation::Buffer(operation), true, cx);
2866 self.did_edit(&old_version, was_dirty, cx);
2867 }
2868
    /// Redoes the most recent transaction.
2870 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2871 let was_dirty = self.is_dirty();
2872 let old_version = self.version.clone();
2873
2874 if let Some((transaction_id, operation)) = self.text.redo() {
2875 self.send_operation(Operation::Buffer(operation), true, cx);
2876 self.did_edit(&old_version, was_dirty, cx);
2877 Some(transaction_id)
2878 } else {
2879 None
2880 }
2881 }
2882
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2884 pub fn redo_to_transaction(
2885 &mut self,
2886 transaction_id: TransactionId,
2887 cx: &mut Context<Self>,
2888 ) -> bool {
2889 let was_dirty = self.is_dirty();
2890 let old_version = self.version.clone();
2891
2892 let operations = self.text.redo_to_transaction(transaction_id);
2893 let redone = !operations.is_empty();
2894 for operation in operations {
2895 self.send_operation(Operation::Buffer(operation), true, cx);
2896 }
2897 if redone {
2898 self.did_edit(&old_version, was_dirty, cx)
2899 }
2900 redone
2901 }
2902
    /// Overrides the current completion triggers with the user-provided completion triggers.
2904 pub fn set_completion_triggers(
2905 &mut self,
2906 server_id: LanguageServerId,
2907 triggers: BTreeSet<String>,
2908 cx: &mut Context<Self>,
2909 ) {
2910 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2911 if triggers.is_empty() {
2912 self.completion_triggers_per_language_server
2913 .remove(&server_id);
2914 self.completion_triggers = self
2915 .completion_triggers_per_language_server
2916 .values()
2917 .flat_map(|triggers| triggers.iter().cloned())
2918 .collect();
2919 } else {
2920 self.completion_triggers_per_language_server
2921 .insert(server_id, triggers.clone());
2922 self.completion_triggers.extend(triggers.iter().cloned());
2923 }
2924 self.send_operation(
2925 Operation::UpdateCompletionTriggers {
2926 triggers: triggers.into_iter().collect(),
2927 lamport_timestamp: self.completion_triggers_timestamp,
2928 server_id,
2929 },
2930 true,
2931 cx,
2932 );
2933 cx.notify();
2934 }
2935
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2938 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2939 &self.completion_triggers
2940 }
2941
2942 /// Call this directly after performing edits to prevent the preview tab
2943 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2944 /// to return false until there are additional edits.
2945 pub fn refresh_preview(&mut self) {
2946 self.preview_version = self.version.clone();
2947 }
2948
2949 /// Whether we should preserve the preview status of a tab containing this buffer.
2950 pub fn preserve_preview(&self) -> bool {
2951 !self.has_edits_since(&self.preview_version)
2952 }
2953}
2954
2955#[doc(hidden)]
2956#[cfg(any(test, feature = "test-support"))]
2957impl Buffer {
2958 pub fn edit_via_marked_text(
2959 &mut self,
2960 marked_string: &str,
2961 autoindent_mode: Option<AutoindentMode>,
2962 cx: &mut Context<Self>,
2963 ) {
2964 let edits = self.edits_for_marked_text(marked_string);
2965 self.edit(edits, autoindent_mode, cx);
2966 }
2967
2968 pub fn set_group_interval(&mut self, group_interval: Duration) {
2969 self.text.set_group_interval(group_interval);
2970 }
2971
2972 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2973 where
2974 T: rand::Rng,
2975 {
2976 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2977 let mut last_end = None;
2978 for _ in 0..old_range_count {
2979 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2980 break;
2981 }
2982
2983 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2984 let mut range = self.random_byte_range(new_start, rng);
2985 if rng.random_bool(0.2) {
2986 mem::swap(&mut range.start, &mut range.end);
2987 }
2988 last_end = Some(range.end);
2989
2990 let new_text_len = rng.random_range(0..10);
2991 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2992 new_text = new_text.to_uppercase();
2993
2994 edits.push((range, new_text));
2995 }
2996 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2997 self.edit(edits, None, cx);
2998 }
2999
3000 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3001 let was_dirty = self.is_dirty();
3002 let old_version = self.version.clone();
3003
3004 let ops = self.text.randomly_undo_redo(rng);
3005 if !ops.is_empty() {
3006 for op in ops {
3007 self.send_operation(Operation::Buffer(op), true, cx);
3008 self.did_edit(&old_version, was_dirty, cx);
3009 }
3010 }
3011 }
3012}
3013
3014impl EventEmitter<BufferEvent> for Buffer {}
3015
3016impl Deref for Buffer {
3017 type Target = TextBuffer;
3018
3019 fn deref(&self) -> &Self::Target {
3020 &self.text
3021 }
3022}
3023
3024impl BufferSnapshot {
3025 /// Returns [`IndentSize`] for a given line that respects user settings and
3026 /// language preferences.
3027 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3028 indent_size_for_line(self, row)
3029 }
3030
3031 /// Returns [`IndentSize`] for a given position that respects user settings
3032 /// and language preferences.
3033 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3034 let settings = language_settings(
3035 self.language_at(position).map(|l| l.name()),
3036 self.file(),
3037 cx,
3038 );
3039 if settings.hard_tabs {
3040 IndentSize::tab()
3041 } else {
3042 IndentSize::spaces(settings.tab_size.get())
3043 }
3044 }
3045
3046 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3047 /// is passed in as `single_indent_size`.
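    ///
    /// Sketch (not compiled as a doctest; `snapshot` is assumed to be a
    /// `BufferSnapshot` whose language has indent queries configured):
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(1u32..=3, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```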
3048 pub fn suggested_indents(
3049 &self,
3050 rows: impl Iterator<Item = u32>,
3051 single_indent_size: IndentSize,
3052 ) -> BTreeMap<u32, IndentSize> {
3053 let mut result = BTreeMap::new();
3054
3055 for row_range in contiguous_ranges(rows, 10) {
3056 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3057 Some(suggestions) => suggestions,
3058 _ => break,
3059 };
3060
3061 for (row, suggestion) in row_range.zip(suggestions) {
3062 let indent_size = if let Some(suggestion) = suggestion {
3063 result
3064 .get(&suggestion.basis_row)
3065 .copied()
3066 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3067 .with_delta(suggestion.delta, single_indent_size)
3068 } else {
3069 self.indent_size_for_line(row)
3070 };
3071
3072 result.insert(row, indent_size);
3073 }
3074 }
3075
3076 result
3077 }
3078
3079 fn suggest_autoindents(
3080 &self,
3081 row_range: Range<u32>,
3082 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3083 let config = &self.language.as_ref()?.config;
3084 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3085
3086 #[derive(Debug, Clone)]
3087 struct StartPosition {
3088 start: Point,
3089 suffix: SharedString,
3090 }
3091
3092 // Find the suggested indentation ranges based on the syntax tree.
3093 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3094 let end = Point::new(row_range.end, 0);
3095 let range = (start..end).to_offset(&self.text);
3096 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3097 Some(&grammar.indents_config.as_ref()?.query)
3098 });
3099 let indent_configs = matches
3100 .grammars()
3101 .iter()
3102 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3103 .collect::<Vec<_>>();
3104
3105 let mut indent_ranges = Vec::<Range<Point>>::new();
3106 let mut start_positions = Vec::<StartPosition>::new();
3107 let mut outdent_positions = Vec::<Point>::new();
3108 while let Some(mat) = matches.peek() {
3109 let mut start: Option<Point> = None;
3110 let mut end: Option<Point> = None;
3111
3112 let config = indent_configs[mat.grammar_index];
3113 for capture in mat.captures {
3114 if capture.index == config.indent_capture_ix {
3115 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3116 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3117 } else if Some(capture.index) == config.start_capture_ix {
3118 start = Some(Point::from_ts_point(capture.node.end_position()));
3119 } else if Some(capture.index) == config.end_capture_ix {
3120 end = Some(Point::from_ts_point(capture.node.start_position()));
3121 } else if Some(capture.index) == config.outdent_capture_ix {
3122 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3123 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3124 start_positions.push(StartPosition {
3125 start: Point::from_ts_point(capture.node.start_position()),
3126 suffix: suffix.clone(),
3127 });
3128 }
3129 }
3130
3131 matches.advance();
3132 if let Some((start, end)) = start.zip(end) {
3133 if start.row == end.row {
3134 continue;
3135 }
3136 let range = start..end;
3137 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3138 Err(ix) => indent_ranges.insert(ix, range),
3139 Ok(ix) => {
3140 let prev_range = &mut indent_ranges[ix];
3141 prev_range.end = prev_range.end.max(range.end);
3142 }
3143 }
3144 }
3145 }
3146
3147 let mut error_ranges = Vec::<Range<Point>>::new();
3148 let mut matches = self
3149 .syntax
3150 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3151 while let Some(mat) = matches.peek() {
3152 let node = mat.captures[0].node;
3153 let start = Point::from_ts_point(node.start_position());
3154 let end = Point::from_ts_point(node.end_position());
3155 let range = start..end;
3156 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3157 Ok(ix) | Err(ix) => ix,
3158 };
3159 let mut end_ix = ix;
3160 while let Some(existing_range) = error_ranges.get(end_ix) {
3161 if existing_range.end < end {
3162 end_ix += 1;
3163 } else {
3164 break;
3165 }
3166 }
3167 error_ranges.splice(ix..end_ix, [range]);
3168 matches.advance();
3169 }
3170
3171 outdent_positions.sort();
3172 for outdent_position in outdent_positions {
3173 // find the innermost indent range containing this outdent_position
3174 // set its end to the outdent position
3175 if let Some(range_to_truncate) = indent_ranges
3176 .iter_mut()
3177 .filter(|indent_range| indent_range.contains(&outdent_position))
3178 .next_back()
3179 {
3180 range_to_truncate.end = outdent_position;
3181 }
3182 }
3183
3184 start_positions.sort_by_key(|b| b.start);
3185
        // Find the suggested indentation increases and decreases based on regexes.
3187 let mut regex_outdent_map = HashMap::default();
3188 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3189 let mut start_positions_iter = start_positions.iter().peekable();
3190
3191 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3192 self.for_each_line(
3193 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3194 ..Point::new(row_range.end, 0),
3195 |row, line| {
3196 if config
3197 .decrease_indent_pattern
3198 .as_ref()
3199 .is_some_and(|regex| regex.is_match(line))
3200 {
3201 indent_change_rows.push((row, Ordering::Less));
3202 }
3203 if config
3204 .increase_indent_pattern
3205 .as_ref()
3206 .is_some_and(|regex| regex.is_match(line))
3207 {
3208 indent_change_rows.push((row + 1, Ordering::Greater));
3209 }
3210 while let Some(pos) = start_positions_iter.peek() {
3211 if pos.start.row < row {
3212 let pos = start_positions_iter.next().unwrap();
3213 last_seen_suffix
3214 .entry(pos.suffix.to_string())
3215 .or_default()
3216 .push(pos.start);
3217 } else {
3218 break;
3219 }
3220 }
3221 for rule in &config.decrease_indent_patterns {
3222 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3223 let row_start_column = self.indent_size_for_line(row).len;
3224 let basis_row = rule
3225 .valid_after
3226 .iter()
3227 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3228 .flatten()
3229 .filter(|start_point| start_point.column <= row_start_column)
3230 .max_by_key(|start_point| start_point.row);
3231 if let Some(outdent_to_row) = basis_row {
3232 regex_outdent_map.insert(row, outdent_to_row.row);
3233 }
3234 break;
3235 }
3236 }
3237 },
3238 );
3239
3240 let mut indent_changes = indent_change_rows.into_iter().peekable();
3241 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3242 prev_non_blank_row.unwrap_or(0)
3243 } else {
3244 row_range.start.saturating_sub(1)
3245 };
3246
3247 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3248 Some(row_range.map(move |row| {
3249 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3250
3251 let mut indent_from_prev_row = false;
3252 let mut outdent_from_prev_row = false;
3253 let mut outdent_to_row = u32::MAX;
3254 let mut from_regex = false;
3255
3256 while let Some((indent_row, delta)) = indent_changes.peek() {
3257 match indent_row.cmp(&row) {
3258 Ordering::Equal => match delta {
3259 Ordering::Less => {
3260 from_regex = true;
3261 outdent_from_prev_row = true
3262 }
3263 Ordering::Greater => {
3264 indent_from_prev_row = true;
3265 from_regex = true
3266 }
3267 _ => {}
3268 },
3269
3270 Ordering::Greater => break,
3271 Ordering::Less => {}
3272 }
3273
3274 indent_changes.next();
3275 }
3276
3277 for range in &indent_ranges {
3278 if range.start.row >= row {
3279 break;
3280 }
3281 if range.start.row == prev_row && range.end > row_start {
3282 indent_from_prev_row = true;
3283 }
3284 if range.end > prev_row_start && range.end <= row_start {
3285 outdent_to_row = outdent_to_row.min(range.start.row);
3286 }
3287 }
3288
3289 if let Some(basis_row) = regex_outdent_map.get(&row) {
3290 indent_from_prev_row = false;
3291 outdent_to_row = *basis_row;
3292 from_regex = true;
3293 }
3294
3295 let within_error = error_ranges
3296 .iter()
3297 .any(|e| e.start.row < row && e.end > row_start);
3298
3299 let suggestion = if outdent_to_row == prev_row
3300 || (outdent_from_prev_row && indent_from_prev_row)
3301 {
3302 Some(IndentSuggestion {
3303 basis_row: prev_row,
3304 delta: Ordering::Equal,
3305 within_error: within_error && !from_regex,
3306 })
3307 } else if indent_from_prev_row {
3308 Some(IndentSuggestion {
3309 basis_row: prev_row,
3310 delta: Ordering::Greater,
3311 within_error: within_error && !from_regex,
3312 })
3313 } else if outdent_to_row < prev_row {
3314 Some(IndentSuggestion {
3315 basis_row: outdent_to_row,
3316 delta: Ordering::Equal,
3317 within_error: within_error && !from_regex,
3318 })
3319 } else if outdent_from_prev_row {
3320 Some(IndentSuggestion {
3321 basis_row: prev_row,
3322 delta: Ordering::Less,
3323 within_error: within_error && !from_regex,
3324 })
3325 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3326 {
3327 Some(IndentSuggestion {
3328 basis_row: prev_row,
3329 delta: Ordering::Equal,
3330 within_error: within_error && !from_regex,
3331 })
3332 } else {
3333 None
3334 };
3335
3336 prev_row = row;
3337 prev_row_start = row_start;
3338 suggestion
3339 }))
3340 }
3341
3342 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3343 while row > 0 {
3344 row -= 1;
3345 if !self.is_line_blank(row) {
3346 return Some(row);
3347 }
3348 }
3349 None
3350 }
3351
3352 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3353 let captures = self.syntax.captures(range, &self.text, |grammar| {
3354 grammar
3355 .highlights_config
3356 .as_ref()
3357 .map(|config| &config.query)
3358 });
3359 let highlight_maps = captures
3360 .grammars()
3361 .iter()
3362 .map(|grammar| grammar.highlight_map())
3363 .collect();
3364 (captures, highlight_maps)
3365 }
3366
3367 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3368 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3369 /// returned in chunks where each chunk has a single syntax highlighting style and
3370 /// diagnostic status.
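    ///
    /// Sketch (not compiled as a doctest; `snapshot` is assumed to be a
    /// `BufferSnapshot` with a language assigned):
    ///
    /// ```ignore
    /// let end = snapshot.len().min(100);
    /// for chunk in snapshot.chunks(0..end, true) {
    ///     // Each chunk is a run of text with uniform highlighting and
    ///     // diagnostic status.
    ///     print!("{}", chunk.text);
    /// }
    /// ```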
3371 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3372 let range = range.start.to_offset(self)..range.end.to_offset(self);
3373
3374 let mut syntax = None;
3375 if language_aware {
3376 syntax = Some(self.get_highlights(range.clone()));
3377 }
3378 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3379 let diagnostics = language_aware;
3380 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3381 }
3382
3383 pub fn highlighted_text_for_range<T: ToOffset>(
3384 &self,
3385 range: Range<T>,
3386 override_style: Option<HighlightStyle>,
3387 syntax_theme: &SyntaxTheme,
3388 ) -> HighlightedText {
3389 HighlightedText::from_buffer_range(
3390 range,
3391 &self.text,
3392 &self.syntax,
3393 override_style,
3394 syntax_theme,
3395 )
3396 }
3397
3398 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3400 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3401 let mut line = String::new();
3402 let mut row = range.start.row;
3403 for chunk in self
3404 .as_rope()
3405 .chunks_in_range(range.to_offset(self))
3406 .chain(["\n"])
3407 {
3408 for (newline_ix, text) in chunk.split('\n').enumerate() {
3409 if newline_ix > 0 {
3410 callback(row, &line);
3411 row += 1;
3412 line.clear();
3413 }
3414 line.push_str(text);
3415 }
3416 }
3417 }
3418
3419 /// Iterates over every [`SyntaxLayer`] in the buffer.
3420 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3421 self.syntax_layers_for_range(0..self.len(), true)
3422 }
3423
3424 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3425 let offset = position.to_offset(self);
3426 self.syntax_layers_for_range(offset..offset, false)
3427 .filter(|l| {
3428 if let Some(ranges) = l.included_sub_ranges {
3429 ranges.iter().any(|range| {
3430 let start = range.start.to_offset(self);
3431 start <= offset && {
3432 let end = range.end.to_offset(self);
3433 offset < end
3434 }
3435 })
3436 } else {
3437 l.node().start_byte() <= offset && l.node().end_byte() > offset
3438 }
3439 })
3440 .last()
3441 }
3442
3443 pub fn syntax_layers_for_range<D: ToOffset>(
3444 &self,
3445 range: Range<D>,
3446 include_hidden: bool,
3447 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3448 self.syntax
3449 .layers_for_range(range, &self.text, include_hidden)
3450 }
3451
3452 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3453 &self,
3454 range: Range<D>,
3455 ) -> Option<SyntaxLayer<'_>> {
3456 let range = range.to_offset(self);
3457 self.syntax
3458 .layers_for_range(range, &self.text, false)
3459 .max_by(|a, b| {
3460 if a.depth != b.depth {
3461 a.depth.cmp(&b.depth)
3462 } else if a.offset.0 != b.offset.0 {
3463 a.offset.0.cmp(&b.offset.0)
3464 } else {
3465 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3466 }
3467 })
3468 }
3469
3470 /// Returns the main [`Language`].
3471 pub fn language(&self) -> Option<&Arc<Language>> {
3472 self.language.as_ref()
3473 }
3474
3475 /// Returns the [`Language`] at the given location.
3476 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3477 self.syntax_layer_at(position)
3478 .map(|info| info.language)
3479 .or(self.language.as_ref())
3480 }
3481
3482 /// Returns the settings for the language at the given location.
3483 pub fn settings_at<'a, D: ToOffset>(
3484 &'a self,
3485 position: D,
3486 cx: &'a App,
3487 ) -> Cow<'a, LanguageSettings> {
3488 language_settings(
3489 self.language_at(position).map(|l| l.name()),
3490 self.file.as_ref(),
3491 cx,
3492 )
3493 }
3494
3495 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3496 CharClassifier::new(self.language_scope_at(point))
3497 }
3498
3499 /// Returns the [`LanguageScope`] at the given location.
3500 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3501 let offset = position.to_offset(self);
3502 let mut scope = None;
3503 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3504
3505 // Use the layer that has the smallest node intersecting the given point.
3506 for layer in self
3507 .syntax
3508 .layers_for_range(offset..offset, &self.text, false)
3509 {
3510 let mut cursor = layer.node().walk();
3511
3512 let mut range = None;
3513 loop {
3514 let child_range = cursor.node().byte_range();
3515 if !child_range.contains(&offset) {
3516 break;
3517 }
3518
3519 range = Some(child_range);
3520 if cursor.goto_first_child_for_byte(offset).is_none() {
3521 break;
3522 }
3523 }
3524
3525 if let Some(range) = range
3526 && smallest_range_and_depth.as_ref().is_none_or(
3527 |(smallest_range, smallest_range_depth)| {
3528 if layer.depth > *smallest_range_depth {
3529 true
3530 } else if layer.depth == *smallest_range_depth {
3531 range.len() < smallest_range.len()
3532 } else {
3533 false
3534 }
3535 },
3536 )
3537 {
3538 smallest_range_and_depth = Some((range, layer.depth));
3539 scope = Some(LanguageScope {
3540 language: layer.language.clone(),
3541 override_id: layer.override_id(offset, &self.text),
3542 });
3543 }
3544 }
3545
3546 scope.or_else(|| {
3547 self.language.clone().map(|language| LanguageScope {
3548 language,
3549 override_id: None,
3550 })
3551 })
3552 }
3553
3554 /// Returns a tuple of the range and character kind of the word
3555 /// surrounding the given position.
3556 pub fn surrounding_word<T: ToOffset>(
3557 &self,
3558 start: T,
3559 scope_context: Option<CharScopeContext>,
3560 ) -> (Range<usize>, Option<CharKind>) {
3561 let mut start = start.to_offset(self);
3562 let mut end = start;
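// Cap the scan at 128 characters in each direction so word detection stays cheap on very long lines.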
3563 let mut next_chars = self.chars_at(start).take(128).peekable();
3564 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3565
3566 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3567 let word_kind = cmp::max(
3568 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3569 next_chars.peek().copied().map(|c| classifier.kind(c)),
3570 );
3571
3572 for ch in prev_chars {
3573 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3574 start -= ch.len_utf8();
3575 } else {
3576 break;
3577 }
3578 }
3579
3580 for ch in next_chars {
3581 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3582 end += ch.len_utf8();
3583 } else {
3584 break;
3585 }
3586 }
3587
3588 (start..end, word_kind)
3589 }
3590
3591 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3592 /// range. When `require_larger` is true, the node found must be larger than the query range.
3593 ///
3594 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3595 /// be moved to the root of the tree.
3596 fn goto_node_enclosing_range(
3597 cursor: &mut tree_sitter::TreeCursor,
3598 query_range: &Range<usize>,
3599 require_larger: bool,
3600 ) -> bool {
3601 let mut ascending = false;
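// Descend towards the smallest node that still encloses the query range; if a descent step overshoots,
// ascend again and stop at the first ancestor that encloses it.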
3602 loop {
3603 let mut range = cursor.node().byte_range();
3604 if query_range.is_empty() {
3605 // When the query range is empty and the current node starts after it, move to the
3606 // previous sibling to find the containing node.
3607 if range.start > query_range.start {
3608 cursor.goto_previous_sibling();
3609 range = cursor.node().byte_range();
3610 }
3611 } else {
3612 // When the query range is non-empty and the current node ends exactly at the start,
3613 // move to the next sibling to find a node that extends beyond the start.
3614 if range.end == query_range.start {
3615 cursor.goto_next_sibling();
3616 range = cursor.node().byte_range();
3617 }
3618 }
3619
3620 let encloses = range.contains_inclusive(query_range)
3621 && (!require_larger || range.len() > query_range.len());
3622 if !encloses {
3623 ascending = true;
3624 if !cursor.goto_parent() {
3625 return false;
3626 }
3627 continue;
3628 } else if ascending {
3629 return true;
3630 }
3631
3632 // Descend into the current node.
3633 if cursor
3634 .goto_first_child_for_byte(query_range.start)
3635 .is_none()
3636 {
3637 return true;
3638 }
3639 }
3640 }
3641
3642 pub fn syntax_ancestor<'a, T: ToOffset>(
3643 &'a self,
3644 range: Range<T>,
3645 ) -> Option<tree_sitter::Node<'a>> {
3646 let range = range.start.to_offset(self)..range.end.to_offset(self);
3647 let mut result: Option<tree_sitter::Node<'a>> = None;
3648 for layer in self
3649 .syntax
3650 .layers_for_range(range.clone(), &self.text, true)
3651 {
3652 let mut cursor = layer.node().walk();
3653
3654 // Find the node that both contains the range and is larger than it.
3655 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3656 continue;
3657 }
3658
3659 let left_node = cursor.node();
3660 let mut layer_result = left_node;
3661
3662 // For an empty range, try to find another node immediately to the right of the range.
3663 if left_node.end_byte() == range.start {
3664 let mut right_node = None;
3665 while !cursor.goto_next_sibling() {
3666 if !cursor.goto_parent() {
3667 break;
3668 }
3669 }
3670
3671 while cursor.node().start_byte() == range.start {
3672 right_node = Some(cursor.node());
3673 if !cursor.goto_first_child() {
3674 break;
3675 }
3676 }
3677
3678 // If there is a candidate node on both sides of the (empty) range, then
3679 // decide between the two by favoring a named node over an anonymous token.
3680 // If both nodes are the same in that regard, favor the right one.
3681 if let Some(right_node) = right_node
3682 && (right_node.is_named() || !left_node.is_named())
3683 {
3684 layer_result = right_node;
3685 }
3686 }
3687
3688 if let Some(previous_result) = &result
3689 && previous_result.byte_range().len() < layer_result.byte_range().len()
3690 {
3691 continue;
3692 }
3693 result = Some(layer_result);
3694 }
3695
3696 result
3697 }
3698
3699 /// Find the previous sibling syntax node at the given range.
3700 ///
3701 /// This function locates the syntax node that precedes the node containing
3702 /// the given range. It searches hierarchically by:
3703 /// 1. Finding the node that contains the given range
3704 /// 2. Looking for the previous sibling at the same tree level
3705 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3706 ///
3707 /// Returns `None` if there is no previous sibling at any ancestor level.
3708 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3709 &'a self,
3710 range: Range<T>,
3711 ) -> Option<tree_sitter::Node<'a>> {
3712 let range = range.start.to_offset(self)..range.end.to_offset(self);
3713 let mut result: Option<tree_sitter::Node<'a>> = None;
3714
3715 for layer in self
3716 .syntax
3717 .layers_for_range(range.clone(), &self.text, true)
3718 {
3719 let mut cursor = layer.node().walk();
3720
3721 // Find the node that contains the range
3722 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3723 continue;
3724 }
3725
3726 // Look for the previous sibling, moving up ancestor levels if needed
3727 loop {
3728 if cursor.goto_previous_sibling() {
3729 let layer_result = cursor.node();
3730
3731 if let Some(previous_result) = &result {
3732 if previous_result.byte_range().end < layer_result.byte_range().end {
3733 continue;
3734 }
3735 }
3736 result = Some(layer_result);
3737 break;
3738 }
3739
3740 // No sibling found at this level, try moving up to parent
3741 if !cursor.goto_parent() {
3742 break;
3743 }
3744 }
3745 }
3746
3747 result
3748 }
3749
3750 /// Find the next sibling syntax node at the given range.
3751 ///
3752 /// This function locates the syntax node that follows the node containing
3753 /// the given range. It searches hierarchically by:
3754 /// 1. Finding the node that contains the given range
3755 /// 2. Looking for the next sibling at the same tree level
3756 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3757 ///
3758 /// Returns `None` if there is no next sibling at any ancestor level.
3759 pub fn syntax_next_sibling<'a, T: ToOffset>(
3760 &'a self,
3761 range: Range<T>,
3762 ) -> Option<tree_sitter::Node<'a>> {
3763 let range = range.start.to_offset(self)..range.end.to_offset(self);
3764 let mut result: Option<tree_sitter::Node<'a>> = None;
3765
3766 for layer in self
3767 .syntax
3768 .layers_for_range(range.clone(), &self.text, true)
3769 {
3770 let mut cursor = layer.node().walk();
3771
3772 // Find the node that contains the range
3773 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3774 continue;
3775 }
3776
3777 // Look for the next sibling, moving up ancestor levels if needed
3778 loop {
3779 if cursor.goto_next_sibling() {
3780 let layer_result = cursor.node();
3781
3782 if let Some(previous_result) = &result {
3783 if previous_result.byte_range().start > layer_result.byte_range().start {
3784 continue;
3785 }
3786 }
3787 result = Some(layer_result);
3788 break;
3789 }
3790
3791 // No sibling found at this level, try moving up to parent
3792 if !cursor.goto_parent() {
3793 break;
3794 }
3795 }
3796 }
3797
3798 result
3799 }
3800
3801 /// Returns the root syntax node within the given row.
3802 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3803 let start_offset = position.to_offset(self);
3804
3805 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3806
3807 let layer = self
3808 .syntax
3809 .layers_for_range(start_offset..start_offset, &self.text, true)
3810 .next()?;
3811
3812 let mut cursor = layer.node().walk();
3813
3814 // Descend to the first leaf that touches the start offset.
3815 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3816 if cursor.node().end_byte() == start_offset {
3817 cursor.goto_next_sibling();
3818 }
3819 }
3820
3821 // Ascend to the root node within the same row.
3822 while cursor.goto_parent() {
3823 if cursor.node().start_position().row != row {
3824 break;
3825 }
3826 }
3827
3828 Some(cursor.node())
3829 }
3830
3831 /// Returns the outline for the buffer.
3832 ///
3833 /// This method allows passing an optional [`SyntaxTheme`] to
3834 /// syntax-highlight the returned symbols.
3835 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3836 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3837 }
3838
3839 /// Returns all the symbols that contain the given position.
3840 ///
3841 /// This method allows passing an optional [`SyntaxTheme`] to
3842 /// syntax-highlight the returned symbols.
3843 pub fn symbols_containing<T: ToOffset>(
3844 &self,
3845 position: T,
3846 theme: Option<&SyntaxTheme>,
3847 ) -> Vec<OutlineItem<Anchor>> {
3848 let position = position.to_offset(self);
3849 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3850 let end = self.clip_offset(position + 1, Bias::Right);
3851 let mut items = self.outline_items_containing(start..end, false, theme);
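// Keep only items that are strictly deeper than the item just before them, so the result is the
// chain of symbols enclosing the position.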
3852 let mut prev_depth = None;
3853 items.retain(|item| {
3854 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3855 prev_depth = Some(item.depth);
3856 result
3857 });
3858 items
3859 }
3860
3861 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3862 let range = range.to_offset(self);
3863 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3864 grammar.outline_config.as_ref().map(|c| &c.query)
3865 });
3866 let configs = matches
3867 .grammars()
3868 .iter()
3869 .map(|g| g.outline_config.as_ref().unwrap())
3870 .collect::<Vec<_>>();
3871
3872 while let Some(mat) = matches.peek() {
3873 let config = &configs[mat.grammar_index];
3874 let containing_item_node = maybe!({
3875 let item_node = mat.captures.iter().find_map(|cap| {
3876 if cap.index == config.item_capture_ix {
3877 Some(cap.node)
3878 } else {
3879 None
3880 }
3881 })?;
3882
3883 let item_byte_range = item_node.byte_range();
3884 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3885 None
3886 } else {
3887 Some(item_node)
3888 }
3889 });
3890
3891 if let Some(item_node) = containing_item_node {
3892 return Some(
3893 Point::from_ts_point(item_node.start_position())
3894 ..Point::from_ts_point(item_node.end_position()),
3895 );
3896 }
3897
3898 matches.advance();
3899 }
3900 None
3901 }
3902
3903 pub fn outline_items_containing<T: ToOffset>(
3904 &self,
3905 range: Range<T>,
3906 include_extra_context: bool,
3907 theme: Option<&SyntaxTheme>,
3908 ) -> Vec<OutlineItem<Anchor>> {
3909 self.outline_items_containing_internal(
3910 range,
3911 include_extra_context,
3912 theme,
3913 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3914 )
3915 }
3916
3917 pub fn outline_items_as_points_containing<T: ToOffset>(
3918 &self,
3919 range: Range<T>,
3920 include_extra_context: bool,
3921 theme: Option<&SyntaxTheme>,
3922 ) -> Vec<OutlineItem<Point>> {
3923 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3924 range
3925 })
3926 }
3927
3928 fn outline_items_containing_internal<T: ToOffset, U>(
3929 &self,
3930 range: Range<T>,
3931 include_extra_context: bool,
3932 theme: Option<&SyntaxTheme>,
3933 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3934 ) -> Vec<OutlineItem<U>> {
3935 let range = range.to_offset(self);
3936 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3937 grammar.outline_config.as_ref().map(|c| &c.query)
3938 });
3939
3940 let mut items = Vec::new();
3941 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3942 while let Some(mat) = matches.peek() {
3943 let config = matches.grammars()[mat.grammar_index]
3944 .outline_config
3945 .as_ref()
3946 .unwrap();
3947 if let Some(item) =
3948 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3949 {
3950 items.push(item);
3951 } else if let Some(capture) = mat
3952 .captures
3953 .iter()
3954 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3955 {
3956 let capture_range = capture.node.start_position()..capture.node.end_position();
3957 let mut capture_row_range =
3958 capture_range.start.row as u32..capture_range.end.row as u32;
3959 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3960 {
3961 capture_row_range.end -= 1;
3962 }
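// Coalesce annotation ranges that overlap or sit on adjacent rows into a single range.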
3963 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3964 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3965 last_row_range.end = capture_row_range.end;
3966 } else {
3967 annotation_row_ranges.push(capture_row_range);
3968 }
3969 } else {
3970 annotation_row_ranges.push(capture_row_range);
3971 }
3972 }
3973 matches.advance();
3974 }
3975
3976 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3977
3978 // Assign depths based on containment relationships and convert ranges via the callback.
3979 let mut item_ends_stack = Vec::<Point>::new();
3980 let mut anchor_items = Vec::new();
3981 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3982 for item in items {
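// Pop enclosing items that end before this item ends; the remaining stack size is this item's nesting depth.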
3983 while let Some(last_end) = item_ends_stack.last().copied() {
3984 if last_end < item.range.end {
3985 item_ends_stack.pop();
3986 } else {
3987 break;
3988 }
3989 }
3990
3991 let mut annotation_row_range = None;
3992 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3993 let row_preceding_item = item.range.start.row.saturating_sub(1);
3994 if next_annotation_row_range.end < row_preceding_item {
3995 annotation_row_ranges.next();
3996 } else {
3997 if next_annotation_row_range.end == row_preceding_item {
3998 annotation_row_range = Some(next_annotation_row_range.clone());
3999 annotation_row_ranges.next();
4000 }
4001 break;
4002 }
4003 }
4004
4005 anchor_items.push(OutlineItem {
4006 depth: item_ends_stack.len(),
4007 range: range_callback(self, item.range.clone()),
4008 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4009 text: item.text,
4010 highlight_ranges: item.highlight_ranges,
4011 name_ranges: item.name_ranges,
4012 body_range: item.body_range.map(|r| range_callback(self, r)),
4013 annotation_range: annotation_row_range.map(|annotation_range| {
4014 let point_range = Point::new(annotation_range.start, 0)
4015 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4016 range_callback(self, point_range)
4017 }),
4018 });
4019 item_ends_stack.push(item.range.end);
4020 }
4021
4022 anchor_items
4023 }
4024
4025 fn next_outline_item(
4026 &self,
4027 config: &OutlineConfig,
4028 mat: &SyntaxMapMatch,
4029 range: &Range<usize>,
4030 include_extra_context: bool,
4031 theme: Option<&SyntaxTheme>,
4032 ) -> Option<OutlineItem<Point>> {
4033 let item_node = mat.captures.iter().find_map(|cap| {
4034 if cap.index == config.item_capture_ix {
4035 Some(cap.node)
4036 } else {
4037 None
4038 }
4039 })?;
4040
4041 let item_byte_range = item_node.byte_range();
4042 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4043 return None;
4044 }
4045 let item_point_range = Point::from_ts_point(item_node.start_position())
4046 ..Point::from_ts_point(item_node.end_position());
4047
4048 let mut open_point = None;
4049 let mut close_point = None;
4050
4051 let mut buffer_ranges = Vec::new();
4052 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4053 let mut range = node.start_byte()..node.end_byte();
4054 let start = node.start_position();
4055 if node.end_position().row > start.row {
4056 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4057 }
4058
4059 if !range.is_empty() {
4060 buffer_ranges.push((range, node_is_name));
4061 }
4062 };
4063
4064 for capture in mat.captures {
4065 if capture.index == config.name_capture_ix {
4066 add_to_buffer_ranges(capture.node, true);
4067 } else if Some(capture.index) == config.context_capture_ix
4068 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4069 {
4070 add_to_buffer_ranges(capture.node, false);
4071 } else {
4072 if Some(capture.index) == config.open_capture_ix {
4073 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4074 } else if Some(capture.index) == config.close_capture_ix {
4075 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4076 }
4077 }
4078 }
4079
4080 if buffer_ranges.is_empty() {
4081 return None;
4082 }
4083 let source_range_for_text =
4084 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4085
4086 let mut text = String::new();
4087 let mut highlight_ranges = Vec::new();
4088 let mut name_ranges = Vec::new();
4089 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4090 let mut last_buffer_range_end = 0;
4091 for (buffer_range, is_name) in buffer_ranges {
4092 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4093 if space_added {
4094 text.push(' ');
4095 }
4096 let before_append_len = text.len();
4097 let mut offset = buffer_range.start;
4098 chunks.seek(buffer_range.clone());
4099 for mut chunk in chunks.by_ref() {
4100 if chunk.text.len() > buffer_range.end - offset {
4101 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4102 offset = buffer_range.end;
4103 } else {
4104 offset += chunk.text.len();
4105 }
4106 let style = chunk
4107 .syntax_highlight_id
4108 .zip(theme)
4109 .and_then(|(highlight, theme)| highlight.style(theme));
4110 if let Some(style) = style {
4111 let start = text.len();
4112 let end = start + chunk.text.len();
4113 highlight_ranges.push((start..end, style));
4114 }
4115 text.push_str(chunk.text);
4116 if offset >= buffer_range.end {
4117 break;
4118 }
4119 }
4120 if is_name {
4121 let after_append_len = text.len();
4122 let start = if space_added && !name_ranges.is_empty() {
4123 before_append_len - 1
4124 } else {
4125 before_append_len
4126 };
4127 name_ranges.push(start..after_append_len);
4128 }
4129 last_buffer_range_end = buffer_range.end;
4130 }
4131
4132 Some(OutlineItem {
4133 depth: 0, // We'll calculate the depth later
4134 range: item_point_range,
4135 source_range_for_text: source_range_for_text.to_point(self),
4136 text,
4137 highlight_ranges,
4138 name_ranges,
4139 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4140 annotation_range: None,
4141 })
4142 }
4143
4144 pub fn function_body_fold_ranges<T: ToOffset>(
4145 &self,
4146 within: Range<T>,
4147 ) -> impl Iterator<Item = Range<usize>> + '_ {
4148 self.text_object_ranges(within, TreeSitterOptions::default())
4149 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4150 }
4151
4152 /// For each grammar in the language, runs the provided
4153 /// [`tree_sitter::Query`] against the given range.
4154 pub fn matches(
4155 &self,
4156 range: Range<usize>,
4157 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4158 ) -> SyntaxMapMatches<'_> {
4159 self.syntax.matches(range, self, query)
4160 }
4161
4162 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4163 /// Hence, it may return more bracket pairs than the range itself contains.
4164 ///
4165 /// Chunks already present in `known_chunks` are omitted, as long as the cached chunk version is still current.
4166 /// The resulting bracket match collections are not ordered.
4167 pub fn fetch_bracket_ranges(
4168 &self,
4169 range: Range<usize>,
4170 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4171 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch>> {
4172 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4173
4174 let known_chunks = match known_chunks {
4175 Some((known_version, known_chunks)) => {
4176 if !tree_sitter_data
4177 .chunks
4178 .version()
4179 .changed_since(known_version)
4180 {
4181 known_chunks.clone()
4182 } else {
4183 HashSet::default()
4184 }
4185 }
4186 None => HashSet::default(),
4187 };
4188
4189 let mut new_bracket_matches = HashMap::default();
4190 let mut all_bracket_matches = HashMap::default();
4191
4192 for chunk in tree_sitter_data
4193 .chunks
4194 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4195 {
4196 if known_chunks.contains(&chunk.row_range()) {
4197 continue;
4198 }
4199 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4200 continue;
4201 };
4202 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4203
4204 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4205 Some(cached_brackets) => cached_brackets,
4206 None => {
4207 let mut bracket_pairs_ends = Vec::new();
4208 let mut matches =
4209 self.syntax
4210 .matches(chunk_range.clone(), &self.text, |grammar| {
4211 grammar.brackets_config.as_ref().map(|c| &c.query)
4212 });
4213 let configs = matches
4214 .grammars()
4215 .iter()
4216 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4217 .collect::<Vec<_>>();
4218
4219 let chunk_range = chunk_range.clone();
4220 let new_matches = iter::from_fn(move || {
4221 while let Some(mat) = matches.peek() {
4222 let mut open = None;
4223 let mut close = None;
4224 let config = configs[mat.grammar_index];
4225 let pattern = &config.patterns[mat.pattern_index];
4226 for capture in mat.captures {
4227 if capture.index == config.open_capture_ix {
4228 open = Some(capture.node.byte_range());
4229 } else if capture.index == config.close_capture_ix {
4230 close = Some(capture.node.byte_range());
4231 }
4232 }
4233
4234 matches.advance();
4235
4236 let Some((open_range, close_range)) = open.zip(close) else {
4237 continue;
4238 };
4239
4240 let bracket_range = open_range.start..=close_range.end;
4241 if !bracket_range.overlaps(&chunk_range) {
4242 continue;
4243 }
4244
4245 return Some((open_range, close_range, pattern));
4246 }
4247 None
4248 })
4249 .sorted_by_key(|(open_range, _, _)| open_range.start)
4250 .map(|(open_range, close_range, pattern)| {
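// Pop brackets that have already closed before this one opens; the remaining stack size is the
// nesting depth used for rainbow coloring.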
4251 while let Some(&last_bracket_end) = bracket_pairs_ends.last() {
4252 if last_bracket_end <= open_range.start {
4253 bracket_pairs_ends.pop();
4254 } else {
4255 break;
4256 }
4257 }
4258
4259 let depth = bracket_pairs_ends.len();
4260 bracket_pairs_ends.push(close_range.end);
4261
4262 BracketMatch {
4263 open_range,
4264 close_range,
4265 newline_only: pattern.newline_only,
4266 color_index: pattern.rainbow_exclude.not().then_some(depth),
4267 }
4268 })
4269 .collect::<Vec<_>>();
4270
4271 new_bracket_matches.insert(chunk.id, new_matches.clone());
4272 new_matches
4273 }
4274 };
4275 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4276 }
4277
4278 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4279 if latest_tree_sitter_data.chunks.version() == &self.version {
4280 for (chunk_id, new_matches) in new_bracket_matches {
4281 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4282 if old_chunks.is_none() {
4283 *old_chunks = Some(new_matches);
4284 }
4285 }
4286 }
4287
4288 all_bracket_matches
4289 }
4290
4291 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4292 let mut tree_sitter_data = self.tree_sitter_data.lock();
4293 if self
4294 .version
4295 .changed_since(tree_sitter_data.chunks.version())
4296 {
4297 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4298 }
4299 tree_sitter_data
4300 }
4301
4302 pub fn all_bracket_ranges(&self, range: Range<usize>) -> impl Iterator<Item = BracketMatch> {
4303 self.fetch_bracket_ranges(range.clone(), None)
4304 .into_values()
4305 .flatten()
4306 .filter(move |bracket_match| {
4307 let bracket_range = bracket_match.open_range.start..=bracket_match.close_range.end;
4308 bracket_range.overlaps(&range)
4309 })
4310 }
4311
4312 /// Returns bracket range pairs overlapping or adjacent to `range`.
4313 pub fn bracket_ranges<T: ToOffset>(
4314 &self,
4315 range: Range<T>,
4316 ) -> impl Iterator<Item = BracketMatch> + '_ {
4317 // Find bracket pairs that *inclusively* contain the given range.
4318 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4319 self.all_bracket_ranges(range)
4320 .filter(|pair| !pair.newline_only)
4321 }
4322
4323 pub fn debug_variables_query<T: ToOffset>(
4324 &self,
4325 range: Range<T>,
4326 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4327 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4328
4329 let mut matches = self.syntax.matches_with_options(
4330 range.clone(),
4331 &self.text,
4332 TreeSitterOptions::default(),
4333 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4334 );
4335
4336 let configs = matches
4337 .grammars()
4338 .iter()
4339 .map(|grammar| grammar.debug_variables_config.as_ref())
4340 .collect::<Vec<_>>();
4341
4342 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4343
4344 iter::from_fn(move || {
4345 loop {
4346 while let Some(capture) = captures.pop() {
4347 if capture.0.overlaps(&range) {
4348 return Some(capture);
4349 }
4350 }
4351
4352 let mat = matches.peek()?;
4353
4354 let Some(config) = configs[mat.grammar_index].as_ref() else {
4355 matches.advance();
4356 continue;
4357 };
4358
4359 for capture in mat.captures {
4360 let Some(ix) = config
4361 .objects_by_capture_ix
4362 .binary_search_by_key(&capture.index, |e| e.0)
4363 .ok()
4364 else {
4365 continue;
4366 };
4367 let text_object = config.objects_by_capture_ix[ix].1;
4368 let byte_range = capture.node.byte_range();
4369
4370 let mut found = false;
4371 for (range, existing) in captures.iter_mut() {
4372 if existing == &text_object {
4373 range.start = range.start.min(byte_range.start);
4374 range.end = range.end.max(byte_range.end);
4375 found = true;
4376 break;
4377 }
4378 }
4379
4380 if !found {
4381 captures.push((byte_range, text_object));
4382 }
4383 }
4384
4385 matches.advance();
4386 }
4387 })
4388 }
4389
4390 pub fn text_object_ranges<T: ToOffset>(
4391 &self,
4392 range: Range<T>,
4393 options: TreeSitterOptions,
4394 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4395 let range =
4396 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4397
4398 let mut matches =
4399 self.syntax
4400 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4401 grammar.text_object_config.as_ref().map(|c| &c.query)
4402 });
4403
4404 let configs = matches
4405 .grammars()
4406 .iter()
4407 .map(|grammar| grammar.text_object_config.as_ref())
4408 .collect::<Vec<_>>();
4409
4410 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4411
4412 iter::from_fn(move || {
4413 loop {
4414 while let Some(capture) = captures.pop() {
4415 if capture.0.overlaps(&range) {
4416 return Some(capture);
4417 }
4418 }
4419
4420 let mat = matches.peek()?;
4421
4422 let Some(config) = configs[mat.grammar_index].as_ref() else {
4423 matches.advance();
4424 continue;
4425 };
4426
4427 for capture in mat.captures {
4428 let Some(ix) = config
4429 .text_objects_by_capture_ix
4430 .binary_search_by_key(&capture.index, |e| e.0)
4431 .ok()
4432 else {
4433 continue;
4434 };
4435 let text_object = config.text_objects_by_capture_ix[ix].1;
4436 let byte_range = capture.node.byte_range();
4437
4438 let mut found = false;
4439 for (range, existing) in captures.iter_mut() {
4440 if existing == &text_object {
4441 range.start = range.start.min(byte_range.start);
4442 range.end = range.end.max(byte_range.end);
4443 found = true;
4444 break;
4445 }
4446 }
4447
4448 if !found {
4449 captures.push((byte_range, text_object));
4450 }
4451 }
4452
4453 matches.advance();
4454 }
4455 })
4456 }
4457
4458 /// Returns enclosing bracket ranges containing the given range.
4459 pub fn enclosing_bracket_ranges<T: ToOffset>(
4460 &self,
4461 range: Range<T>,
4462 ) -> impl Iterator<Item = BracketMatch> + '_ {
4463 let range = range.start.to_offset(self)..range.end.to_offset(self);
4464
4465 self.bracket_ranges(range.clone()).filter(move |pair| {
4466 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4467 })
4468 }
4469
4470 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain it.
4471 ///
4472 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4473 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4474 &self,
4475 range: Range<T>,
4476 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4477 ) -> Option<(Range<usize>, Range<usize>)> {
4478 let range = range.start.to_offset(self)..range.end.to_offset(self);
4479
4480 // Get the ranges of the innermost pair of brackets.
4481 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4482
4483 for pair in self.enclosing_bracket_ranges(range) {
4484 if let Some(range_filter) = range_filter
4485 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4486 {
4487 continue;
4488 }
4489
4490 let len = pair.close_range.end - pair.open_range.start;
4491
4492 if let Some((existing_open, existing_close)) = &result {
4493 let existing_len = existing_close.end - existing_open.start;
4494 if len > existing_len {
4495 continue;
4496 }
4497 }
4498
4499 result = Some((pair.open_range, pair.close_range));
4500 }
4501
4502 result
4503 }
4504
4505 /// Returns the offset ranges of any matches of the redaction query.
4506 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4507 /// will be run on the relevant section of the buffer.
4508 pub fn redacted_ranges<T: ToOffset>(
4509 &self,
4510 range: Range<T>,
4511 ) -> impl Iterator<Item = Range<usize>> + '_ {
4512 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4513 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4514 grammar
4515 .redactions_config
4516 .as_ref()
4517 .map(|config| &config.query)
4518 });
4519
4520 let configs = syntax_matches
4521 .grammars()
4522 .iter()
4523 .map(|grammar| grammar.redactions_config.as_ref())
4524 .collect::<Vec<_>>();
4525
4526 iter::from_fn(move || {
4527 let redacted_range = syntax_matches
4528 .peek()
4529 .and_then(|mat| {
4530 configs[mat.grammar_index].and_then(|config| {
4531 mat.captures
4532 .iter()
4533 .find(|capture| capture.index == config.redaction_capture_ix)
4534 })
4535 })
4536 .map(|mat| mat.node.byte_range());
4537 syntax_matches.advance();
4538 redacted_range
4539 })
4540 }
4541
4542 pub fn injections_intersecting_range<T: ToOffset>(
4543 &self,
4544 range: Range<T>,
4545 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4546 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4547
4548 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4549 grammar
4550 .injection_config
4551 .as_ref()
4552 .map(|config| &config.query)
4553 });
4554
4555 let configs = syntax_matches
4556 .grammars()
4557 .iter()
4558 .map(|grammar| grammar.injection_config.as_ref())
4559 .collect::<Vec<_>>();
4560
4561 iter::from_fn(move || {
4562 let ranges = syntax_matches.peek().and_then(|mat| {
4563 let config = &configs[mat.grammar_index]?;
4564 let content_capture_range = mat.captures.iter().find_map(|capture| {
4565 if capture.index == config.content_capture_ix {
4566 Some(capture.node.byte_range())
4567 } else {
4568 None
4569 }
4570 })?;
4571 let language = self.language_at(content_capture_range.start)?;
4572 Some((content_capture_range, language))
4573 });
4574 syntax_matches.advance();
4575 ranges
4576 })
4577 }
4578
4579 pub fn runnable_ranges(
4580 &self,
4581 offset_range: Range<usize>,
4582 ) -> impl Iterator<Item = RunnableRange> + '_ {
4583 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4584 grammar.runnable_config.as_ref().map(|config| &config.query)
4585 });
4586
4587 let test_configs = syntax_matches
4588 .grammars()
4589 .iter()
4590 .map(|grammar| grammar.runnable_config.as_ref())
4591 .collect::<Vec<_>>();
4592
4593 iter::from_fn(move || {
4594 loop {
4595 let mat = syntax_matches.peek()?;
4596
4597 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4598 let mut run_range = None;
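// Compute the smallest byte range spanning every capture of this match.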
4599 let full_range = mat.captures.iter().fold(
4600 Range {
4601 start: usize::MAX,
4602 end: 0,
4603 },
4604 |mut acc, next| {
4605 let byte_range = next.node.byte_range();
4606 if acc.start > byte_range.start {
4607 acc.start = byte_range.start;
4608 }
4609 if acc.end < byte_range.end {
4610 acc.end = byte_range.end;
4611 }
4612 acc
4613 },
4614 );
4615 if full_range.start > full_range.end {
4616 // We did not find a full spanning range of this match.
4617 return None;
4618 }
4619 let extra_captures: SmallVec<[_; 1]> =
4620 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4621 test_configs
4622 .extra_captures
4623 .get(capture.index as usize)
4624 .cloned()
4625 .and_then(|tag_name| match tag_name {
4626 RunnableCapture::Named(name) => {
4627 Some((capture.node.byte_range(), name))
4628 }
4629 RunnableCapture::Run => {
4630 let _ = run_range.insert(capture.node.byte_range());
4631 None
4632 }
4633 })
4634 }));
4635 let run_range = run_range?;
4636 let tags = test_configs
4637 .query
4638 .property_settings(mat.pattern_index)
4639 .iter()
4640 .filter_map(|property| {
4641 if *property.key == *"tag" {
4642 property
4643 .value
4644 .as_ref()
4645 .map(|value| RunnableTag(value.to_string().into()))
4646 } else {
4647 None
4648 }
4649 })
4650 .collect();
4651 let extra_captures = extra_captures
4652 .into_iter()
4653 .map(|(range, name)| {
4654 (
4655 name.to_string(),
4656 self.text_for_range(range).collect::<String>(),
4657 )
4658 })
4659 .collect();
4660 // All tags should have the same range.
4661 Some(RunnableRange {
4662 run_range,
4663 full_range,
4664 runnable: Runnable {
4665 tags,
4666 language: mat.language,
4667 buffer: self.remote_id(),
4668 },
4669 extra_captures,
4670 buffer_id: self.remote_id(),
4671 })
4672 });
4673
4674 syntax_matches.advance();
4675 if test_range.is_some() {
4676 // It's fine to short-circuit when .peek()? returns None. We only want to avoid returning None from
4677 // this iterator when a match lacked a run marker; in that case we simply loop around to the next match.
4678 return test_range;
4679 }
4680 }
4681 })
4682 }
4683
4684 /// Returns selections for remote peers intersecting the given range.
4685 #[allow(clippy::type_complexity)]
4686 pub fn selections_in_range(
4687 &self,
4688 range: Range<Anchor>,
4689 include_local: bool,
4690 ) -> impl Iterator<
4691 Item = (
4692 ReplicaId,
4693 bool,
4694 CursorShape,
4695 impl Iterator<Item = &Selection<Anchor>> + '_,
4696 ),
4697 > + '_ {
4698 self.remote_selections
4699 .iter()
4700 .filter(move |(replica_id, set)| {
4701 (include_local || **replica_id != self.text.replica_id())
4702 && !set.selections.is_empty()
4703 })
4704 .map(move |(replica_id, set)| {
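// Binary search for the sub-slice of selections overlapping the range: `start_ix` is the first
// selection ending at or after `range.start`, and `end_ix` is one past the last selection starting
// at or before `range.end`. The `.then(...)` biases ties so the search always yields an insertion point.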
4705 let start_ix = match set.selections.binary_search_by(|probe| {
4706 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4707 }) {
4708 Ok(ix) | Err(ix) => ix,
4709 };
4710 let end_ix = match set.selections.binary_search_by(|probe| {
4711 probe.start.cmp(&range.end, self).then(Ordering::Less)
4712 }) {
4713 Ok(ix) | Err(ix) => ix,
4714 };
4715
4716 (
4717 *replica_id,
4718 set.line_mode,
4719 set.cursor_shape,
4720 set.selections[start_ix..end_ix].iter(),
4721 )
4722 })
4723 }
4724
4725 /// Returns whether the buffer contains any diagnostics.
4726 pub fn has_diagnostics(&self) -> bool {
4727 !self.diagnostics.is_empty()
4728 }
4729
4730 /// Returns all the diagnostics intersecting the given range.
4731 pub fn diagnostics_in_range<'a, T, O>(
4732 &'a self,
4733 search_range: Range<T>,
4734 reversed: bool,
4735 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4736 where
4737 T: 'a + Clone + ToOffset,
4738 O: 'a + FromAnchor,
4739 {
4740 let mut iterators: Vec<_> = self
4741 .diagnostics
4742 .iter()
4743 .map(|(_, collection)| {
4744 collection
4745 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4746 .peekable()
4747 })
4748 .collect();
4749
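// Merge the per-server iterators, repeatedly yielding the entry with the smallest start (or largest,
// when reversed), breaking ties by severity and then by group id for a stable order.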
4750 std::iter::from_fn(move || {
4751 let (next_ix, _) = iterators
4752 .iter_mut()
4753 .enumerate()
4754 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4755 .min_by(|(_, a), (_, b)| {
4756 let cmp = a
4757 .range
4758 .start
4759 .cmp(&b.range.start, self)
4760 // when range is equal, sort by diagnostic severity
4761 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4762 // and stabilize order with group_id
4763 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4764 if reversed { cmp.reverse() } else { cmp }
4765 })?;
4766 iterators[next_ix]
4767 .next()
4768 .map(
4769 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4770 diagnostic,
4771 range: FromAnchor::from_anchor(&range.start, self)
4772 ..FromAnchor::from_anchor(&range.end, self),
4773 },
4774 )
4775 })
4776 }
4777
4778 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4779 /// should be used instead.
4780 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4781 &self.diagnostics
4782 }
4783
4784 /// Returns all the diagnostic groups associated with the given
4785 /// language server ID. If no language server ID is provided,
4786 /// all diagnostics groups are returned.
4787 pub fn diagnostic_groups(
4788 &self,
4789 language_server_id: Option<LanguageServerId>,
4790 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4791 let mut groups = Vec::new();
4792
4793 if let Some(language_server_id) = language_server_id {
4794 if let Ok(ix) = self
4795 .diagnostics
4796 .binary_search_by_key(&language_server_id, |e| e.0)
4797 {
4798 self.diagnostics[ix]
4799 .1
4800 .groups(language_server_id, &mut groups, self);
4801 }
4802 } else {
4803 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4804 diagnostics.groups(*language_server_id, &mut groups, self);
4805 }
4806 }
4807
4808 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4809 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4810 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4811 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4812 });
4813
4814 groups
4815 }
4816
4817 /// Returns an iterator over the diagnostics for the given group.
4818 pub fn diagnostic_group<O>(
4819 &self,
4820 group_id: usize,
4821 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4822 where
4823 O: FromAnchor + 'static,
4824 {
4825 self.diagnostics
4826 .iter()
4827 .flat_map(move |(_, set)| set.group(group_id, self))
4828 }
4829
4830 /// An integer version number that accounts for all updates besides
4831 /// the buffer's text itself (which is versioned via a version vector).
4832 pub fn non_text_state_update_count(&self) -> usize {
4833 self.non_text_state_update_count
4834 }
4835
4836 /// An integer version that changes when the buffer's syntax changes.
4837 pub fn syntax_update_count(&self) -> usize {
4838 self.syntax.update_count()
4839 }
4840
4841 /// Returns a snapshot of the underlying file.
4842 pub fn file(&self) -> Option<&Arc<dyn File>> {
4843 self.file.as_ref()
4844 }
4845
4846 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4847 if let Some(file) = self.file() {
4848 if file.path().file_name().is_none() || include_root {
4849 Some(file.full_path(cx).to_string_lossy().into_owned())
4850 } else {
4851 Some(file.path().display(file.path_style(cx)).to_string())
4852 }
4853 } else {
4854 None
4855 }
4856 }
4857
4858 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4859 let query_str = query.fuzzy_contents;
4860 if query_str.is_some_and(|query| query.is_empty()) {
4861 return BTreeMap::default();
4862 }
4863
4864 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4865 language,
4866 override_id: None,
4867 }));
4868
4869 let mut query_ix = 0;
4870 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4871 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4872
4873 let mut words = BTreeMap::default();
4874 let mut current_word_start_ix = None;
4875 let mut chunk_ix = query.range.start;
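// Walk the range character by character, advancing `query_ix` whenever the next fuzzy query character
// matches (case-insensitively, in order). A word is collected only if the whole query was matched by
// the time the word ends.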
4876 for chunk in self.chunks(query.range, false) {
4877 for (i, c) in chunk.text.char_indices() {
4878 let ix = chunk_ix + i;
4879 if classifier.is_word(c) {
4880 if current_word_start_ix.is_none() {
4881 current_word_start_ix = Some(ix);
4882 }
4883
4884 if let Some(query_chars) = &query_chars
4885 && query_ix < query_len
4886 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4887 {
4888 query_ix += 1;
4889 }
4890 continue;
4891 } else if let Some(word_start) = current_word_start_ix.take()
4892 && query_ix == query_len
4893 {
4894 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4895 let mut word_text = self.text_for_range(word_start..ix).peekable();
4896 let first_char = word_text
4897 .peek()
4898 .and_then(|first_chunk| first_chunk.chars().next());
4899 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4900 if !query.skip_digits
4901 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4902 {
4903 words.insert(word_text.collect(), word_range);
4904 }
4905 }
4906 query_ix = 0;
4907 }
4908 chunk_ix += chunk.text.len();
4909 }
4910
4911 words
4912 }
4913}
4914
4915pub struct WordsQuery<'a> {
4916 /// Only returns words that contain all characters of the fuzzy string, in order (case-insensitive).
4917 pub fuzzy_contents: Option<&'a str>,
4918 /// Skips words that start with a digit.
4919 pub skip_digits: bool,
4920 /// The buffer offset range in which to look for words.
4921 pub range: Range<usize>,
4922}
4923
4924fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4925 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4926}
4927
4928fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4929 let mut result = IndentSize::spaces(0);
4930 for c in text {
4931 let kind = match c {
4932 ' ' => IndentKind::Space,
4933 '\t' => IndentKind::Tab,
4934 _ => break,
4935 };
4936 if result.len == 0 {
4937 result.kind = kind;
4938 }
4939 result.len += 1;
4940 }
4941 result
4942}
4943
4944impl Clone for BufferSnapshot {
4945 fn clone(&self) -> Self {
4946 Self {
4947 text: self.text.clone(),
4948 syntax: self.syntax.clone(),
4949 file: self.file.clone(),
4950 remote_selections: self.remote_selections.clone(),
4951 diagnostics: self.diagnostics.clone(),
4952 language: self.language.clone(),
4953 tree_sitter_data: self.tree_sitter_data.clone(),
4954 non_text_state_update_count: self.non_text_state_update_count,
4955 }
4956 }
4957}
4958
4959impl Deref for BufferSnapshot {
4960 type Target = text::BufferSnapshot;
4961
4962 fn deref(&self) -> &Self::Target {
4963 &self.text
4964 }
4965}
4966
4967unsafe impl Send for BufferChunks<'_> {}
4968
4969impl<'a> BufferChunks<'a> {
4970 pub(crate) fn new(
4971 text: &'a Rope,
4972 range: Range<usize>,
4973 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4974 diagnostics: bool,
4975 buffer_snapshot: Option<&'a BufferSnapshot>,
4976 ) -> Self {
4977 let mut highlights = None;
4978 if let Some((captures, highlight_maps)) = syntax {
4979 highlights = Some(BufferChunkHighlights {
4980 captures,
4981 next_capture: None,
4982 stack: Default::default(),
4983 highlight_maps,
4984 })
4985 }
4986
4987 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4988 let chunks = text.chunks_in_range(range.clone());
4989
4990 let mut this = BufferChunks {
4991 range,
4992 buffer_snapshot,
4993 chunks,
4994 diagnostic_endpoints,
4995 error_depth: 0,
4996 warning_depth: 0,
4997 information_depth: 0,
4998 hint_depth: 0,
4999 unnecessary_depth: 0,
5000 underline: true,
5001 highlights,
5002 };
5003 this.initialize_diagnostic_endpoints();
5004 this
5005 }
5006
5007 /// Seeks to the given byte range in the buffer.
5008 pub fn seek(&mut self, range: Range<usize>) {
5009 let old_range = std::mem::replace(&mut self.range, range.clone());
5010 self.chunks.set_range(self.range.clone());
5011 if let Some(highlights) = self.highlights.as_mut() {
5012 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5013 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5014 highlights
5015 .stack
5016 .retain(|(end_offset, _)| *end_offset > range.start);
5017 if let Some(capture) = &highlights.next_capture
5018 && range.start >= capture.node.start_byte()
5019 {
5020 let next_capture_end = capture.node.end_byte();
5021 if range.start < next_capture_end {
5022 highlights.stack.push((
5023 next_capture_end,
5024 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5025 ));
5026 }
5027 highlights.next_capture.take();
5028 }
5029 } else if let Some(snapshot) = self.buffer_snapshot {
5030 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5031 *highlights = BufferChunkHighlights {
5032 captures,
5033 next_capture: None,
5034 stack: Default::default(),
5035 highlight_maps,
5036 };
5037 } else {
5038 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5039 // Seeking such BufferChunks is not supported.
5040 debug_assert!(
5041 false,
5042 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5043 );
5044 }
5045
5046 highlights.captures.set_byte_range(self.range.clone());
5047 self.initialize_diagnostic_endpoints();
5048 }
5049 }
5050
5051 fn initialize_diagnostic_endpoints(&mut self) {
5052 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5053 && let Some(buffer) = self.buffer_snapshot
5054 {
5055 let mut diagnostic_endpoints = Vec::new();
5056 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5057 diagnostic_endpoints.push(DiagnosticEndpoint {
5058 offset: entry.range.start,
5059 is_start: true,
5060 severity: entry.diagnostic.severity,
5061 is_unnecessary: entry.diagnostic.is_unnecessary,
5062 underline: entry.diagnostic.underline,
5063 });
5064 diagnostic_endpoints.push(DiagnosticEndpoint {
5065 offset: entry.range.end,
5066 is_start: false,
5067 severity: entry.diagnostic.severity,
5068 is_unnecessary: entry.diagnostic.is_unnecessary,
5069 underline: entry.diagnostic.underline,
5070 });
5071 }
5072 diagnostic_endpoints
5073 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5074 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5075 self.hint_depth = 0;
5076 self.error_depth = 0;
5077 self.warning_depth = 0;
5078 self.information_depth = 0;
5079 }
5080 }
5081
5082 /// The current byte offset in the buffer.
5083 pub fn offset(&self) -> usize {
5084 self.range.start
5085 }
5086
5087 pub fn range(&self) -> Range<usize> {
5088 self.range.clone()
5089 }
5090
5091 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5092 let depth = match endpoint.severity {
5093 DiagnosticSeverity::ERROR => &mut self.error_depth,
5094 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5095 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5096 DiagnosticSeverity::HINT => &mut self.hint_depth,
5097 _ => return,
5098 };
5099 if endpoint.is_start {
5100 *depth += 1;
5101 } else {
5102 *depth -= 1;
5103 }
5104
5105 if endpoint.is_unnecessary {
5106 if endpoint.is_start {
5107 self.unnecessary_depth += 1;
5108 } else {
5109 self.unnecessary_depth -= 1;
5110 }
5111 }
5112 }
5113
5114 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5115 if self.error_depth > 0 {
5116 Some(DiagnosticSeverity::ERROR)
5117 } else if self.warning_depth > 0 {
5118 Some(DiagnosticSeverity::WARNING)
5119 } else if self.information_depth > 0 {
5120 Some(DiagnosticSeverity::INFORMATION)
5121 } else if self.hint_depth > 0 {
5122 Some(DiagnosticSeverity::HINT)
5123 } else {
5124 None
5125 }
5126 }
5127
5128 fn current_code_is_unnecessary(&self) -> bool {
5129 self.unnecessary_depth > 0
5130 }
5131}
5132
5133impl<'a> Iterator for BufferChunks<'a> {
5134 type Item = Chunk<'a>;
5135
5136 fn next(&mut self) -> Option<Self::Item> {
5137 let mut next_capture_start = usize::MAX;
5138 let mut next_diagnostic_endpoint = usize::MAX;
5139
5140 if let Some(highlights) = self.highlights.as_mut() {
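// Drop highlight captures that have already ended, then push any captures that start at or before
// the current offset onto the stack.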
5141 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5142 if *parent_capture_end <= self.range.start {
5143 highlights.stack.pop();
5144 } else {
5145 break;
5146 }
5147 }
5148
5149 if highlights.next_capture.is_none() {
5150 highlights.next_capture = highlights.captures.next();
5151 }
5152
5153 while let Some(capture) = highlights.next_capture.as_ref() {
5154 if self.range.start < capture.node.start_byte() {
5155 next_capture_start = capture.node.start_byte();
5156 break;
5157 } else {
5158 let highlight_id =
5159 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5160 highlights
5161 .stack
5162 .push((capture.node.end_byte(), highlight_id));
5163 highlights.next_capture = highlights.captures.next();
5164 }
5165 }
5166 }
5167
5168 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5169 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5170 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5171 if endpoint.offset <= self.range.start {
5172 self.update_diagnostic_depths(endpoint);
5173 diagnostic_endpoints.next();
5174 self.underline = endpoint.underline;
5175 } else {
5176 next_diagnostic_endpoint = endpoint.offset;
5177 break;
5178 }
5179 }
5180 }
5181 self.diagnostic_endpoints = diagnostic_endpoints;
5182
5183 if let Some(ChunkBitmaps {
5184 text: chunk,
5185 chars: chars_map,
5186 tabs,
5187 }) = self.chunks.peek_with_bitmaps()
5188 {
5189 let chunk_start = self.range.start;
5190 let mut chunk_end = (self.chunks.offset() + chunk.len())
5191 .min(next_capture_start)
5192 .min(next_diagnostic_endpoint);
5193 let mut highlight_id = None;
5194 if let Some(highlights) = self.highlights.as_ref()
5195 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5196 {
5197 chunk_end = chunk_end.min(*parent_capture_end);
5198 highlight_id = Some(*parent_highlight_id);
5199 }
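// Translate the absolute offsets into bit positions within this chunk, then mask the per-character
// bitmaps down to the returned slice.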
5200 let bit_start = chunk_start - self.chunks.offset();
5201 let bit_end = chunk_end - self.chunks.offset();
5202
5203 let slice = &chunk[bit_start..bit_end];
5204
5205 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5206 let tabs = (tabs >> bit_start) & mask;
5207 let chars = (chars_map >> bit_start) & mask;
5208
5209 self.range.start = chunk_end;
5210 if self.range.start == self.chunks.offset() + chunk.len() {
5211 self.chunks.next().unwrap();
5212 }
5213
5214 Some(Chunk {
5215 text: slice,
5216 syntax_highlight_id: highlight_id,
5217 underline: self.underline,
5218 diagnostic_severity: self.current_diagnostic_severity(),
5219 is_unnecessary: self.current_code_is_unnecessary(),
5220 tabs,
5221 chars,
5222 ..Chunk::default()
5223 })
5224 } else {
5225 None
5226 }
5227 }
5228}
5229
5230impl operation_queue::Operation for Operation {
5231 fn lamport_timestamp(&self) -> clock::Lamport {
5232 match self {
5233 Operation::Buffer(_) => {
5234 unreachable!("buffer operations should never be deferred at this layer")
5235 }
5236 Operation::UpdateDiagnostics {
5237 lamport_timestamp, ..
5238 }
5239 | Operation::UpdateSelections {
5240 lamport_timestamp, ..
5241 }
5242 | Operation::UpdateCompletionTriggers {
5243 lamport_timestamp, ..
5244 }
5245 | Operation::UpdateLineEnding {
5246 lamport_timestamp, ..
5247 } => *lamport_timestamp,
5248 }
5249 }
5250}
5251
5252impl Default for Diagnostic {
5253 fn default() -> Self {
5254 Self {
5255 source: Default::default(),
5256 source_kind: DiagnosticSourceKind::Other,
5257 code: None,
5258 code_description: None,
5259 severity: DiagnosticSeverity::ERROR,
5260 message: Default::default(),
5261 markdown: None,
5262 group_id: 0,
5263 is_primary: false,
5264 is_disk_based: false,
5265 is_unnecessary: false,
5266 underline: true,
5267 data: None,
5268 }
5269 }
5270}
5271
5272impl IndentSize {
5273 /// Returns an [`IndentSize`] representing the given spaces.
5274 pub fn spaces(len: u32) -> Self {
5275 Self {
5276 len,
5277 kind: IndentKind::Space,
5278 }
5279 }
5280
5281 /// Returns an [`IndentSize`] representing a tab.
5282 pub fn tab() -> Self {
5283 Self {
5284 len: 1,
5285 kind: IndentKind::Tab,
5286 }
5287 }
5288
5289 /// An iterator over the characters represented by this [`IndentSize`].
5290 pub fn chars(&self) -> impl Iterator<Item = char> {
5291 iter::repeat(self.char()).take(self.len as usize)
5292 }
5293
5294 /// The character representation of this [`IndentSize`].
5295 pub fn char(&self) -> char {
5296 match self.kind {
5297 IndentKind::Space => ' ',
5298 IndentKind::Tab => '\t',
5299 }
5300 }
5301
5302 /// Consumes the current [`IndentSize`] and returns a new one that has
5303 /// been shrunk or enlarged by the given size along the given direction.
5304 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5305 match direction {
5306 Ordering::Less => {
5307 if self.kind == size.kind && self.len >= size.len {
5308 self.len -= size.len;
5309 }
5310 }
5311 Ordering::Equal => {}
5312 Ordering::Greater => {
5313 if self.len == 0 {
5314 self = size;
5315 } else if self.kind == size.kind {
5316 self.len += size.len;
5317 }
5318 }
5319 }
5320 self
5321 }
5322
5323 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5324 match self.kind {
5325 IndentKind::Space => self.len as usize,
5326 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5327 }
5328 }
5329}
5330
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

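/// Groups an ascending sequence of `u32` values into contiguous ranges,
/// starting a new range whenever the next value is not consecutive or the
/// current range would exceed `max_len` entries.
///
/// A rough sketch of the expected output (illustrative, not a verified doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6, 10].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 10..11]);
/// ```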
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

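/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally consulting a [`LanguageScope`] so that language-specific word
/// characters are also treated as part of a word.
///
/// A rough usage sketch (illustrative, not a verified doctest; assumes
/// `CharKind` can be compared with `assert_eq!`):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(false);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
/// ```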
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

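    /// Sets the [`CharScopeContext`] that selects which language-defined
    /// character set applies (completion queries, linked edits, or plain words).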
    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

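    /// When `ignore_punctuation` is true, characters that would otherwise be
    /// classified as punctuation are treated as word characters.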
    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

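    /// Classifies `c`, overriding this classifier's `ignore_punctuation` setting
    /// with the given flag. Alphanumeric characters and `_` are always words;
    /// characters listed by the language scope for the current context are also
    /// treated as words; the remainder are whitespace or punctuation.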
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
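///
/// A rough sketch of the expected output (illustrative, not a verified
/// doctest; assumes `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```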
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}