1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encodings::{Encoding, EncodingOptions};
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] may be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 encoding: Encoding,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
    /// A URL that provides more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
    /// The human-readable message, in Markdown format, if available.
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
    /// Distinguishes groups of diagnostics based on their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to
    /// the server when code actions are requested for it.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
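///
/// A hedged sketch of reacting to these events from another GPUI entity; the
/// `buffer` handle and the subscriber's `cx` are assumed to be in scope:
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event: &BufferEvent, _cx| {
///     if let BufferEvent::Edited = event {
///         // Respond to the edit, e.g. refresh dependent UI state.
///     }
/// })
/// .detach();
/// ```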
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
    /// The buffer needs to be reloaded.
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335 /// The buffer's encoding was changed.
336 EncodingChanged,
337}
338
339/// The file associated with a buffer.
340pub trait File: Send + Sync + Any {
341 /// Returns the [`LocalFile`] associated with this file, if the
342 /// file is local.
343 fn as_local(&self) -> Option<&dyn LocalFile>;
344
345 /// Returns whether this file is local.
346 fn is_local(&self) -> bool {
347 self.as_local().is_some()
348 }
349
350 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
351 /// only available in some states, such as modification time.
352 fn disk_state(&self) -> DiskState;
353
354 /// Returns the path of this file relative to the worktree's root directory.
355 fn path(&self) -> &Arc<RelPath>;
356
357 /// Returns the path of this file relative to the worktree's parent directory (this means it
358 /// includes the name of the worktree's root folder).
359 fn full_path(&self, cx: &App) -> PathBuf;
360
361 /// Returns the path style of this file.
362 fn path_style(&self, cx: &App) -> PathStyle;
363
364 /// Returns the last component of this handle's absolute path. If this handle refers to the root
365 /// of its worktree, then this method will return the name of the worktree itself.
366 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
367
368 /// Returns the id of the worktree to which this file belongs.
369 ///
370 /// This is needed for looking up project-specific settings.
371 fn worktree_id(&self, cx: &App) -> WorktreeId;
372
373 /// Converts this file into a protobuf message.
374 fn to_proto(&self, cx: &App) -> rpc::proto::File;
375
    /// Returns whether Zed considers this to be a private file.
377 fn is_private(&self) -> bool;
378
    /// Returns the character encoding of this file, if known.
    fn encoding(&self) -> Option<Arc<Encoding>> {
380 unimplemented!()
381 }
382}
383
384/// The file's storage status - whether it's stored (`Present`), and if so when it was last
385/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
386/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
387/// indicator for new files.
388#[derive(Copy, Clone, Debug, PartialEq)]
389pub enum DiskState {
390 /// File created in Zed that has not been saved.
391 New,
392 /// File present on the filesystem.
393 Present { mtime: MTime },
394 /// Deleted file that was previously present.
395 Deleted,
396}
397
398impl DiskState {
399 /// Returns the file's last known modification time on disk.
400 pub fn mtime(self) -> Option<MTime> {
401 match self {
402 DiskState::New => None,
403 DiskState::Present { mtime } => Some(mtime),
404 DiskState::Deleted => None,
405 }
406 }
407
408 pub fn exists(&self) -> bool {
409 match self {
410 DiskState::New => false,
411 DiskState::Present { .. } => true,
412 DiskState::Deleted => false,
413 }
414 }
415}
416
417/// The file associated with a buffer, in the case where the file is on the local disk.
418pub trait LocalFile: File {
    /// Returns the absolute path of this file.
420 fn abs_path(&self, cx: &App) -> PathBuf;
421
    /// Loads the file's contents from disk, returning the detected encoding and the
    /// text as a UTF-8 string.
423 fn load(&self, cx: &App, options: EncodingOptions) -> Task<Result<(Encoding, String)>>;
424
425 /// Loads the file's contents from disk.
426 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
427}
428
429/// The auto-indent behavior associated with an editing operation.
430/// For some editing operations, each affected line of text has its
431/// indentation recomputed. For other operations, the entire block
432/// of edited text is adjusted uniformly.
433#[derive(Clone, Debug)]
434pub enum AutoindentMode {
435 /// Indent each line of inserted text.
436 EachLine,
437 /// Apply the same indentation adjustment to all of the lines
438 /// in a given insertion.
439 Block {
440 /// The original indentation column of the first line of each
441 /// insertion, if it has been copied.
442 ///
443 /// Knowing this makes it possible to preserve the relative indentation
444 /// of every line in the insertion from when it was copied.
445 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by `b - a`.
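        ///
        /// A small worked example of the adjustment described above: if the copied
        /// text's first line was originally at column 4 (`a = 4`) and auto-indent
        /// places it at column 8 (`b = 8`), then a line that was originally at
        /// column 10 ends up at column 10 + (8 - 4) = 14.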
449 original_indent_columns: Vec<Option<u32>>,
450 },
451}
452
453#[derive(Clone)]
454struct AutoindentRequest {
455 before_edit: BufferSnapshot,
456 entries: Vec<AutoindentRequestEntry>,
457 is_block_mode: bool,
458 ignore_empty_lines: bool,
459}
460
461#[derive(Debug, Clone)]
462struct AutoindentRequestEntry {
463 /// A range of the buffer whose indentation should be adjusted.
464 range: Range<Anchor>,
465 /// Whether or not these lines should be considered brand new, for the
466 /// purpose of auto-indent. When text is not new, its indentation will
467 /// only be adjusted if the suggested indentation level has *changed*
468 /// since the edit was made.
469 first_line_is_new: bool,
470 indent_size: IndentSize,
471 original_indent_column: Option<u32>,
472}
473
474#[derive(Debug)]
475struct IndentSuggestion {
476 basis_row: u32,
477 delta: Ordering,
478 within_error: bool,
479}
480
481struct BufferChunkHighlights<'a> {
482 captures: SyntaxMapCaptures<'a>,
483 next_capture: Option<SyntaxMapCapture<'a>>,
484 stack: Vec<(usize, HighlightId)>,
485 highlight_maps: Vec<HighlightMap>,
486}
487
488/// An iterator that yields chunks of a buffer's text, along with their
489/// syntax highlights and diagnostic status.
490pub struct BufferChunks<'a> {
491 buffer_snapshot: Option<&'a BufferSnapshot>,
492 range: Range<usize>,
493 chunks: text::Chunks<'a>,
494 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
495 error_depth: usize,
496 warning_depth: usize,
497 information_depth: usize,
498 hint_depth: usize,
499 unnecessary_depth: usize,
500 underline: bool,
501 highlights: Option<BufferChunkHighlights<'a>>,
502}
503
504/// A chunk of a buffer's text, along with its syntax highlight and
505/// diagnostic status.
506#[derive(Clone, Debug, Default)]
507pub struct Chunk<'a> {
508 /// The text of the chunk.
509 pub text: &'a str,
510 /// The syntax highlighting style of the chunk.
511 pub syntax_highlight_id: Option<HighlightId>,
512 /// The highlight style that has been applied to this chunk in
513 /// the editor.
514 pub highlight_style: Option<HighlightStyle>,
515 /// The severity of diagnostic associated with this chunk, if any.
516 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitset of the positions at which characters begin in this chunk.
    pub chars: u128,
521 /// Whether this chunk of text is marked as unnecessary.
522 pub is_unnecessary: bool,
523 /// Whether this chunk of text was originally a tab character.
524 pub is_tab: bool,
525 /// Whether this chunk of text was originally an inlay.
526 pub is_inlay: bool,
527 /// Whether to underline the corresponding text range in the editor.
528 pub underline: bool,
529}
530
531/// A set of edits to a given version of a buffer, computed asynchronously.
532#[derive(Debug)]
533pub struct Diff {
534 pub base_version: clock::Global,
535 pub line_ending: LineEnding,
536 pub edits: Vec<(Range<usize>, Arc<str>)>,
537}
538
539#[derive(Debug, Clone, Copy)]
540pub(crate) struct DiagnosticEndpoint {
541 offset: usize,
542 is_start: bool,
543 underline: bool,
544 severity: DiagnosticSeverity,
545 is_unnecessary: bool,
546}
547
548/// A class of characters, used for characterizing a run of text.
549#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
550pub enum CharKind {
551 /// Whitespace.
552 Whitespace,
553 /// Punctuation.
554 Punctuation,
555 /// Word.
556 Word,
557}
558
559/// Context for character classification within a specific scope.
560#[derive(Copy, Clone, Eq, PartialEq, Debug)]
561pub enum CharScopeContext {
562 /// Character classification for completion queries.
563 ///
564 /// This context treats certain characters as word constituents that would
565 /// normally be considered punctuation, such as '-' in Tailwind classes
566 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
567 Completion,
568 /// Character classification for linked edits.
569 ///
570 /// This context handles characters that should be treated as part of
571 /// identifiers during linked editing operations, such as '.' in JSX
572 /// component names like `<Animated.View>`.
573 LinkedEdit,
574}
575
/// A runnable is a set of data about a region that could be resolved into a task.
577pub struct Runnable {
578 pub tags: SmallVec<[RunnableTag; 1]>,
579 pub language: Arc<Language>,
580 pub buffer: BufferId,
581}
582
583#[derive(Default, Clone, Debug)]
584pub struct HighlightedText {
585 pub text: SharedString,
586 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
587}
588
589#[derive(Default, Debug)]
590struct HighlightedTextBuilder {
591 pub text: String,
592 highlights: Vec<(Range<usize>, HighlightStyle)>,
593}
594
595impl HighlightedText {
596 pub fn from_buffer_range<T: ToOffset>(
597 range: Range<T>,
598 snapshot: &text::BufferSnapshot,
599 syntax_snapshot: &SyntaxSnapshot,
600 override_style: Option<HighlightStyle>,
601 syntax_theme: &SyntaxTheme,
602 ) -> Self {
603 let mut highlighted_text = HighlightedTextBuilder::default();
604 highlighted_text.add_text_from_buffer_range(
605 range,
606 snapshot,
607 syntax_snapshot,
608 override_style,
609 syntax_theme,
610 );
611 highlighted_text.build()
612 }
613
614 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
615 gpui::StyledText::new(self.text.clone())
616 .with_default_highlights(default_style, self.highlights.iter().cloned())
617 }
618
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
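    ///
    /// A usage sketch (the `highlighted_text` value is assumed):
    ///
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```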
621 pub fn first_line_preview(self) -> (Self, bool) {
622 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
623 let first_line = &self.text[..newline_ix];
624
625 // Trim leading whitespace, unless an edit starts prior to it.
626 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
627 if let Some((first_highlight_range, _)) = self.highlights.first() {
628 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
629 }
630
631 let preview_text = &first_line[preview_start_ix..];
632 let preview_highlights = self
633 .highlights
634 .into_iter()
635 .skip_while(|(range, _)| range.end <= preview_start_ix)
636 .take_while(|(range, _)| range.start < newline_ix)
637 .filter_map(|(mut range, highlight)| {
638 range.start = range.start.saturating_sub(preview_start_ix);
639 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
640 if range.is_empty() {
641 None
642 } else {
643 Some((range, highlight))
644 }
645 });
646
647 let preview = Self {
648 text: SharedString::new(preview_text),
649 highlights: preview_highlights.collect(),
650 };
651
652 (preview, self.text.len() > newline_ix)
653 }
654}
655
656impl HighlightedTextBuilder {
657 pub fn build(self) -> HighlightedText {
658 HighlightedText {
659 text: self.text.into(),
660 highlights: self.highlights,
661 }
662 }
663
664 pub fn add_text_from_buffer_range<T: ToOffset>(
665 &mut self,
666 range: Range<T>,
667 snapshot: &text::BufferSnapshot,
668 syntax_snapshot: &SyntaxSnapshot,
669 override_style: Option<HighlightStyle>,
670 syntax_theme: &SyntaxTheme,
671 ) {
672 let range = range.to_offset(snapshot);
673 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
674 let start = self.text.len();
675 self.text.push_str(chunk.text);
676 let end = self.text.len();
677
678 if let Some(highlight_style) = chunk
679 .syntax_highlight_id
680 .and_then(|id| id.style(syntax_theme))
681 {
682 let highlight_style = override_style.map_or(highlight_style, |override_style| {
683 highlight_style.highlight(override_style)
684 });
685 self.highlights.push((start..end, highlight_style));
686 } else if let Some(override_style) = override_style {
687 self.highlights.push((start..end, override_style));
688 }
689 }
690 }
691
692 fn highlighted_chunks<'a>(
693 range: Range<usize>,
694 snapshot: &'a text::BufferSnapshot,
695 syntax_snapshot: &'a SyntaxSnapshot,
696 ) -> BufferChunks<'a> {
697 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
698 grammar
699 .highlights_config
700 .as_ref()
701 .map(|config| &config.query)
702 });
703
704 let highlight_maps = captures
705 .grammars()
706 .iter()
707 .map(|grammar| grammar.highlight_map())
708 .collect();
709
710 BufferChunks::new(
711 snapshot.as_rope(),
712 range,
713 Some((captures, highlight_maps)),
714 false,
715 None,
716 )
717 }
718}
719
720#[derive(Clone)]
721pub struct EditPreview {
722 old_snapshot: text::BufferSnapshot,
723 applied_edits_snapshot: text::BufferSnapshot,
724 syntax_snapshot: SyntaxSnapshot,
725}
726
727impl EditPreview {
728 pub fn highlight_edits(
729 &self,
730 current_snapshot: &BufferSnapshot,
731 edits: &[(Range<Anchor>, String)],
732 include_deletions: bool,
733 cx: &App,
734 ) -> HighlightedText {
735 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
736 return HighlightedText::default();
737 };
738
739 let mut highlighted_text = HighlightedTextBuilder::default();
740
741 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
742
743 let insertion_highlight_style = HighlightStyle {
744 background_color: Some(cx.theme().status().created_background),
745 ..Default::default()
746 };
747 let deletion_highlight_style = HighlightStyle {
748 background_color: Some(cx.theme().status().deleted_background),
749 ..Default::default()
750 };
751 let syntax_theme = cx.theme().syntax();
752
753 for (range, edit_text) in edits {
754 let edit_new_end_in_preview_snapshot = range
755 .end
756 .bias_right(&self.old_snapshot)
757 .to_offset(&self.applied_edits_snapshot);
758 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
759
760 let unchanged_range_in_preview_snapshot =
761 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
762 if !unchanged_range_in_preview_snapshot.is_empty() {
763 highlighted_text.add_text_from_buffer_range(
764 unchanged_range_in_preview_snapshot,
765 &self.applied_edits_snapshot,
766 &self.syntax_snapshot,
767 None,
768 syntax_theme,
769 );
770 }
771
772 let range_in_current_snapshot = range.to_offset(current_snapshot);
773 if include_deletions && !range_in_current_snapshot.is_empty() {
774 highlighted_text.add_text_from_buffer_range(
775 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
778 Some(deletion_highlight_style),
779 syntax_theme,
780 );
781 }
782
783 if !edit_text.is_empty() {
784 highlighted_text.add_text_from_buffer_range(
785 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
786 &self.applied_edits_snapshot,
787 &self.syntax_snapshot,
788 Some(insertion_highlight_style),
789 syntax_theme,
790 );
791 }
792
793 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
794 }
795
796 highlighted_text.add_text_from_buffer_range(
797 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
798 &self.applied_edits_snapshot,
799 &self.syntax_snapshot,
800 None,
801 syntax_theme,
802 );
803
804 highlighted_text.build()
805 }
806
807 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
808 let (first, _) = edits.first()?;
809 let (last, _) = edits.last()?;
810
811 let start = first
812 .start
813 .bias_left(&self.old_snapshot)
814 .to_point(&self.applied_edits_snapshot);
815 let end = last
816 .end
817 .bias_right(&self.old_snapshot)
818 .to_point(&self.applied_edits_snapshot);
819
820 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
821 let range = Point::new(start.row, 0)
822 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
823
824 Some(range.to_offset(&self.applied_edits_snapshot))
825 }
826}
827
828#[derive(Clone, Debug, PartialEq, Eq)]
829pub struct BracketMatch {
830 pub open_range: Range<usize>,
831 pub close_range: Range<usize>,
832 pub newline_only: bool,
833}
834
835impl Buffer {
836 /// Create a new buffer with the given base text.
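    ///
    /// A minimal usage sketch; `cx.new` supplies the `Context<Buffer>` this
    /// constructor expects:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```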
837 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
838 Self::build(
839 TextBuffer::new(
840 ReplicaId::LOCAL,
841 cx.entity_id().as_non_zero_u64().into(),
842 base_text.into(),
843 &cx.background_executor(),
844 ),
845 None,
846 Capability::ReadWrite,
847 )
848 }
849
    /// Replaces the underlying text buffer. In contrast to `set_text`, this does not
    /// change the buffer's editing state.
852 pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
853 self.text = new;
854 self.saved_version = self.version.clone();
855 self.has_unsaved_edits.set((self.version.clone(), false));
856
857 self.was_changed();
858 cx.emit(BufferEvent::DirtyChanged);
859 cx.notify();
860 }
861
862 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
863 pub fn local_normalized(
864 base_text_normalized: Rope,
865 line_ending: LineEnding,
866 cx: &Context<Self>,
867 ) -> Self {
868 Self::build(
869 TextBuffer::new_normalized(
870 ReplicaId::LOCAL,
871 cx.entity_id().as_non_zero_u64().into(),
872 line_ending,
873 base_text_normalized,
874 ),
875 None,
876 Capability::ReadWrite,
877 )
878 }
879
880 /// Create a new buffer that is a replica of a remote buffer.
881 pub fn remote(
882 remote_id: BufferId,
883 replica_id: ReplicaId,
884 capability: Capability,
885 base_text: impl Into<String>,
886 cx: &BackgroundExecutor,
887 ) -> Self {
888 Self::build(
889 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
890 None,
891 capability,
892 )
893 }
894
895 /// Create a new buffer that is a replica of a remote buffer, populating its
896 /// state from the given protobuf message.
897 pub fn from_proto(
898 replica_id: ReplicaId,
899 capability: Capability,
900 message: proto::BufferState,
901 file: Option<Arc<dyn File>>,
902 cx: &BackgroundExecutor,
903 ) -> Result<Self> {
904 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
905 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
906 let mut this = Self::build(buffer, file, capability);
907 this.text.set_line_ending(proto::deserialize_line_ending(
908 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
909 ));
910 this.saved_version = proto::deserialize_version(&message.saved_version);
911 this.saved_mtime = message.saved_mtime.map(|time| time.into());
912 Ok(this)
913 }
914
915 /// Serialize the buffer's state to a protobuf message.
916 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
917 proto::BufferState {
918 id: self.remote_id().into(),
919 file: self.file.as_ref().map(|f| f.to_proto(cx)),
920 base_text: self.base_text().to_string(),
921 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
922 saved_version: proto::serialize_version(&self.saved_version),
923 saved_mtime: self.saved_mtime.map(|time| time.into()),
924 }
925 }
926
927 /// Serialize as protobufs all of the changes to the buffer since the given version.
928 pub fn serialize_ops(
929 &self,
930 since: Option<clock::Global>,
931 cx: &App,
932 ) -> Task<Vec<proto::Operation>> {
933 let mut operations = Vec::new();
934 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
935
936 operations.extend(self.remote_selections.iter().map(|(_, set)| {
937 proto::serialize_operation(&Operation::UpdateSelections {
938 selections: set.selections.clone(),
939 lamport_timestamp: set.lamport_timestamp,
940 line_mode: set.line_mode,
941 cursor_shape: set.cursor_shape,
942 })
943 }));
944
945 for (server_id, diagnostics) in &self.diagnostics {
946 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
947 lamport_timestamp: self.diagnostics_timestamp,
948 server_id: *server_id,
949 diagnostics: diagnostics.iter().cloned().collect(),
950 }));
951 }
952
953 for (server_id, completions) in &self.completion_triggers_per_language_server {
954 operations.push(proto::serialize_operation(
955 &Operation::UpdateCompletionTriggers {
956 triggers: completions.iter().cloned().collect(),
957 lamport_timestamp: self.completion_triggers_timestamp,
958 server_id: *server_id,
959 },
960 ));
961 }
962
963 let text_operations = self.text.operations().clone();
964 cx.background_spawn(async move {
965 let since = since.unwrap_or_default();
966 operations.extend(
967 text_operations
968 .iter()
969 .filter(|(_, op)| !since.observed(op.timestamp()))
970 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
971 );
972 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
973 operations
974 })
975 }
976
977 /// Assign a language to the buffer, returning the buffer.
978 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
979 self.set_language(Some(language), cx);
980 self
981 }
982
983 /// Returns the [`Capability`] of this buffer.
984 pub fn capability(&self) -> Capability {
985 self.capability
986 }
987
988 /// Whether this buffer can only be read.
989 pub fn read_only(&self) -> bool {
990 self.capability == Capability::ReadOnly
991 }
992
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`] and [`Capability`].
994 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
995 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
996 let snapshot = buffer.snapshot();
997 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
998 Self {
999 saved_mtime,
1000 saved_version: buffer.version(),
1001 preview_version: buffer.version(),
1002 reload_task: None,
1003 transaction_depth: 0,
1004 was_dirty_before_starting_transaction: None,
1005 has_unsaved_edits: Cell::new((buffer.version(), false)),
1006 text: buffer,
1007 branch_state: None,
1008 file,
1009 capability,
1010 syntax_map,
1011 reparse: None,
1012 non_text_state_update_count: 0,
1013 sync_parse_timeout: Duration::from_millis(1),
1014 parse_status: watch::channel(ParseStatus::Idle),
1015 autoindent_requests: Default::default(),
1016 wait_for_autoindent_txs: Default::default(),
1017 pending_autoindent: Default::default(),
1018 language: None,
1019 remote_selections: Default::default(),
1020 diagnostics: Default::default(),
1021 diagnostics_timestamp: Lamport::MIN,
1022 completion_triggers: Default::default(),
1023 completion_triggers_per_language_server: Default::default(),
1024 completion_triggers_timestamp: Lamport::MIN,
1025 deferred_ops: OperationQueue::new(),
1026 has_conflict: false,
1027 change_bits: Default::default(),
1028 _subscriptions: Vec::new(),
1029 encoding: Encoding::default(),
1030 }
1031 }
1032
1033 pub fn build_snapshot(
1034 text: Rope,
1035 language: Option<Arc<Language>>,
1036 language_registry: Option<Arc<LanguageRegistry>>,
1037 cx: &mut App,
1038 ) -> impl Future<Output = BufferSnapshot> + use<> {
1039 let entity_id = cx.reserve_entity::<Self>().entity_id();
1040 let buffer_id = entity_id.as_non_zero_u64().into();
1041 async move {
1042 let text =
1043 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1044 .snapshot();
1045 let mut syntax = SyntaxMap::new(&text).snapshot();
1046 if let Some(language) = language.clone() {
1047 let language_registry = language_registry.clone();
1048 syntax.reparse(&text, language_registry, language);
1049 }
1050 BufferSnapshot {
1051 text,
1052 syntax,
1053 file: None,
1054 diagnostics: Default::default(),
1055 remote_selections: Default::default(),
1056 language,
1057 non_text_state_update_count: 0,
1058 }
1059 }
1060 }
1061
1062 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1063 let entity_id = cx.reserve_entity::<Self>().entity_id();
1064 let buffer_id = entity_id.as_non_zero_u64().into();
1065 let text = TextBuffer::new_normalized(
1066 ReplicaId::LOCAL,
1067 buffer_id,
1068 Default::default(),
1069 Rope::new(),
1070 )
1071 .snapshot();
1072 let syntax = SyntaxMap::new(&text).snapshot();
1073 BufferSnapshot {
1074 text,
1075 syntax,
1076 file: None,
1077 diagnostics: Default::default(),
1078 remote_selections: Default::default(),
1079 language: None,
1080 non_text_state_update_count: 0,
1081 }
1082 }
1083
1084 #[cfg(any(test, feature = "test-support"))]
1085 pub fn build_snapshot_sync(
1086 text: Rope,
1087 language: Option<Arc<Language>>,
1088 language_registry: Option<Arc<LanguageRegistry>>,
1089 cx: &mut App,
1090 ) -> BufferSnapshot {
1091 let entity_id = cx.reserve_entity::<Self>().entity_id();
1092 let buffer_id = entity_id.as_non_zero_u64().into();
1093 let text =
1094 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1095 .snapshot();
1096 let mut syntax = SyntaxMap::new(&text).snapshot();
1097 if let Some(language) = language.clone() {
1098 syntax.reparse(&text, language_registry, language);
1099 }
1100 BufferSnapshot {
1101 text,
1102 syntax,
1103 file: None,
1104 diagnostics: Default::default(),
1105 remote_selections: Default::default(),
1106 language,
1107 non_text_state_update_count: 0,
1108 }
1109 }
1110
1111 /// Retrieve a snapshot of the buffer's current state. This is computationally
1112 /// cheap, and allows reading from the buffer on a background thread.
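    ///
    /// A sketch of the intended usage, with the `buffer` handle assumed:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be read off the main thread.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```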
1113 pub fn snapshot(&self) -> BufferSnapshot {
1114 let text = self.text.snapshot();
1115 let mut syntax_map = self.syntax_map.lock();
1116 syntax_map.interpolate(&text);
1117 let syntax = syntax_map.snapshot();
1118
1119 BufferSnapshot {
1120 text,
1121 syntax,
1122 file: self.file.clone(),
1123 remote_selections: self.remote_selections.clone(),
1124 diagnostics: self.diagnostics.clone(),
1125 language: self.language.clone(),
1126 non_text_state_update_count: self.non_text_state_update_count,
1127 }
1128 }
1129
1130 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1131 let this = cx.entity();
1132 cx.new(|cx| {
1133 let mut branch = Self {
1134 branch_state: Some(BufferBranchState {
1135 base_buffer: this.clone(),
1136 merged_operations: Default::default(),
1137 }),
1138 language: self.language.clone(),
1139 has_conflict: self.has_conflict,
1140 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1141 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1142 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1143 };
1144 if let Some(language_registry) = self.language_registry() {
1145 branch.set_language_registry(language_registry);
1146 }
1147
1148 // Reparse the branch buffer so that we get syntax highlighting immediately.
1149 branch.reparse(cx);
1150
1151 branch
1152 })
1153 }
1154
1155 pub fn preview_edits(
1156 &self,
1157 edits: Arc<[(Range<Anchor>, String)]>,
1158 cx: &App,
1159 ) -> Task<EditPreview> {
1160 let registry = self.language_registry();
1161 let language = self.language().cloned();
1162 let old_snapshot = self.text.snapshot();
1163 let mut branch_buffer = self.text.branch();
1164 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1165 let executor = cx.background_executor().clone();
1166 cx.background_spawn(async move {
1167 if !edits.is_empty() {
1168 if let Some(language) = language.clone() {
1169 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1170 }
1171
1172 branch_buffer.edit(edits.iter().cloned(), &executor);
1173 let snapshot = branch_buffer.snapshot();
1174 syntax_snapshot.interpolate(&snapshot);
1175
1176 if let Some(language) = language {
1177 syntax_snapshot.reparse(&snapshot, registry, language);
1178 }
1179 }
1180 EditPreview {
1181 old_snapshot,
1182 applied_edits_snapshot: branch_buffer.snapshot(),
1183 syntax_snapshot,
1184 }
1185 })
1186 }
1187
1188 /// Applies all of the changes in this buffer that intersect any of the
1189 /// given `ranges` to its base buffer.
1190 ///
1191 /// If `ranges` is empty, then all changes will be applied. This buffer must
1192 /// be a branch buffer to call this method.
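    ///
    /// A hedged sketch of the branch workflow, with handles assumed to be in scope:
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// edited in the branch\n")], None, cx);
    ///     // Apply every branch change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```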
1193 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1194 let Some(base_buffer) = self.base_buffer() else {
1195 debug_panic!("not a branch buffer");
1196 return;
1197 };
1198
1199 let mut ranges = if ranges.is_empty() {
1200 &[0..usize::MAX]
1201 } else {
1202 ranges.as_slice()
1203 }
1204 .iter()
1205 .peekable();
1206
1207 let mut edits = Vec::new();
1208 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1209 let mut is_included = false;
1210 while let Some(range) = ranges.peek() {
1211 if range.end < edit.new.start {
1212 ranges.next().unwrap();
1213 } else {
1214 if range.start <= edit.new.end {
1215 is_included = true;
1216 }
1217 break;
1218 }
1219 }
1220
1221 if is_included {
1222 edits.push((
1223 edit.old.clone(),
1224 self.text_for_range(edit.new.clone()).collect::<String>(),
1225 ));
1226 }
1227 }
1228
1229 let operation = base_buffer.update(cx, |base_buffer, cx| {
1230 // cx.emit(BufferEvent::DiffBaseChanged);
1231 base_buffer.edit(edits, None, cx)
1232 });
1233
1234 if let Some(operation) = operation
1235 && let Some(BufferBranchState {
1236 merged_operations, ..
1237 }) = &mut self.branch_state
1238 {
1239 merged_operations.push(operation);
1240 }
1241 }
1242
1243 fn on_base_buffer_event(
1244 &mut self,
1245 _: Entity<Buffer>,
1246 event: &BufferEvent,
1247 cx: &mut Context<Self>,
1248 ) {
1249 let BufferEvent::Operation { operation, .. } = event else {
1250 return;
1251 };
1252 let Some(BufferBranchState {
1253 merged_operations, ..
1254 }) = &mut self.branch_state
1255 else {
1256 return;
1257 };
1258
1259 let mut operation_to_undo = None;
1260 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1261 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1262 {
1263 merged_operations.remove(ix);
1264 operation_to_undo = Some(operation.timestamp);
1265 }
1266
1267 self.apply_ops([operation.clone()], cx);
1268
1269 if let Some(timestamp) = operation_to_undo {
1270 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1271 self.undo_operations(counts, cx);
1272 }
1273 }
1274
1275 #[cfg(test)]
1276 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1277 &self.text
1278 }
1279
1280 /// Retrieve a snapshot of the buffer's raw text, without any
1281 /// language-related state like the syntax tree or diagnostics.
1282 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1283 self.text.snapshot()
1284 }
1285
1286 /// The file associated with the buffer, if any.
1287 pub fn file(&self) -> Option<&Arc<dyn File>> {
1288 self.file.as_ref()
1289 }
1290
1291 /// The version of the buffer that was last saved or reloaded from disk.
1292 pub fn saved_version(&self) -> &clock::Global {
1293 &self.saved_version
1294 }
1295
1296 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1297 pub fn saved_mtime(&self) -> Option<MTime> {
1298 self.saved_mtime
1299 }
1300
1301 /// Assign a language to the buffer.
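    ///
    /// A usage sketch; `rust_language` stands in for an `Arc<Language>` obtained
    /// elsewhere (for example from a `LanguageRegistry`):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language(Some(rust_language.clone()), cx);
    /// });
    /// ```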
1302 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1303 self.non_text_state_update_count += 1;
1304 self.syntax_map.lock().clear(&self.text);
1305 self.language = language;
1306 self.was_changed();
1307 self.reparse(cx);
1308 cx.emit(BufferEvent::LanguageChanged);
1309 }
1310
1311 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1312 /// other languages if parts of the buffer are written in different languages.
1313 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1314 self.syntax_map
1315 .lock()
1316 .set_language_registry(language_registry);
1317 }
1318
1319 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1320 self.syntax_map.lock().language_registry()
1321 }
1322
1323 /// Assign the line ending type to the buffer.
1324 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1325 self.text.set_line_ending(line_ending);
1326
1327 let lamport_timestamp = self.text.lamport_clock.tick();
1328 self.send_operation(
1329 Operation::UpdateLineEnding {
1330 line_ending,
1331 lamport_timestamp,
1332 },
1333 true,
1334 cx,
1335 );
1336 }
1337
1338 /// Assign the buffer a new [`Capability`].
1339 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1340 if self.capability != capability {
1341 self.capability = capability;
1342 cx.emit(BufferEvent::CapabilityChanged)
1343 }
1344 }
1345
1346 /// This method is called to signal that the buffer has been saved.
1347 pub fn did_save(
1348 &mut self,
1349 version: clock::Global,
1350 mtime: Option<MTime>,
1351 cx: &mut Context<Self>,
1352 ) {
1353 self.saved_version = version.clone();
1354 self.has_unsaved_edits.set((version, false));
1355 self.has_conflict = false;
1356 self.saved_mtime = mtime;
1357 self.was_changed();
1358 cx.emit(BufferEvent::Saved);
1359 cx.notify();
1360 }
1361
1362 /// Reloads the contents of the buffer from disk.
1363 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1364 let (tx, rx) = futures::channel::oneshot::channel();
1365
1366 let prev_version = self.text.version();
1367 self.reload_task = Some(cx.spawn(async move |this, cx| {
1368 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1369 let file = this.file.as_ref()?.as_local()?;
1370
1371 Some((file.disk_state().mtime(), {
1372 file.load(
1373 cx,
1374 EncodingOptions {
1375 expected: this.encoding,
1376 auto_detect: false,
1377 },
1378 )
1379 }))
1380 })?
1381 else {
1382 return Ok(());
1383 };
1384
1385 let (new_encoding, new_text) = new_text.await?;
1386 let diff = this
1387 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1388 .await;
1389 this.update(cx, |this, cx| {
1390 if this.version() == diff.base_version {
1391 this.finalize_last_transaction();
1392 this.apply_diff(diff, cx);
1393 tx.send(this.finalize_last_transaction().cloned()).ok();
1394 this.has_conflict = false;
1395 if new_encoding != this.encoding {
1396 this.set_encoding(new_encoding, cx);
1397 }
1398 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1399 } else {
1400 if !diff.edits.is_empty()
1401 || this
1402 .edits_since::<usize>(&diff.base_version)
1403 .next()
1404 .is_some()
1405 {
1406 this.has_conflict = true;
1407 }
1408
1409 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1410 }
1411
1412 this.reload_task.take();
1413 })
1414 }));
1415 rx
1416 }
1417
1418 /// This method is called to signal that the buffer has been reloaded.
1419 pub fn did_reload(
1420 &mut self,
1421 version: clock::Global,
1422 line_ending: LineEnding,
1423 mtime: Option<MTime>,
1424 cx: &mut Context<Self>,
1425 ) {
1426 self.saved_version = version;
1427 self.has_unsaved_edits
1428 .set((self.saved_version.clone(), false));
1429 self.text.set_line_ending(line_ending);
1430 self.saved_mtime = mtime;
1431 cx.emit(BufferEvent::Reloaded);
1432 cx.notify();
1433 }
1434
1435 pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
1436 self.file = Some(new_file);
    }

    /// Updates the [`File`] backing this buffer. This should be called when
1439 /// the file has changed or has been deleted.
1440 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1441 let was_dirty = self.is_dirty();
1442 let mut file_changed = false;
1443
1444 if let Some(old_file) = self.file.as_ref() {
1445 if new_file.path() != old_file.path() {
1446 file_changed = true;
1447 }
1448
1449 let old_state = old_file.disk_state();
1450 let new_state = new_file.disk_state();
1451 if old_state != new_state {
1452 file_changed = true;
1453 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1454 cx.emit(BufferEvent::ReloadNeeded)
1455 }
1456 }
1457 } else {
1458 file_changed = true;
1459 };
1460
1461 self.file = Some(new_file);
1462 if file_changed {
1463 self.was_changed();
1464 self.non_text_state_update_count += 1;
1465 if was_dirty != self.is_dirty() {
1466 cx.emit(BufferEvent::DirtyChanged);
1467 }
1468 cx.emit(BufferEvent::FileHandleChanged);
1469 cx.notify();
1470 }
1471 }
1472
1473 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1474 Some(self.branch_state.as_ref()?.base_buffer.clone())
1475 }
1476
1477 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1478 pub fn language(&self) -> Option<&Arc<Language>> {
1479 self.language.as_ref()
1480 }
1481
1482 /// Returns the [`Language`] at the given location.
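    ///
    /// For a buffer with language injections (for example a Markdown buffer that
    /// contains a fenced Rust code block), this can differ from [`Self::language`].
    /// A sketch, with `buffer` and `cursor_offset` assumed:
    ///
    /// ```ignore
    /// // Offsets inside the fenced block may resolve to the injected language,
    /// // while offsets elsewhere fall back to the buffer's primary language.
    /// let language = buffer.read(cx).language_at(cursor_offset);
    /// ```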
1483 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1484 let offset = position.to_offset(self);
1485 let mut is_first = true;
1486 let start_anchor = self.anchor_before(offset);
1487 let end_anchor = self.anchor_after(offset);
1488 self.syntax_map
1489 .lock()
1490 .layers_for_range(offset..offset, &self.text, false)
1491 .filter(|layer| {
1492 if is_first {
1493 is_first = false;
1494 return true;
1495 }
1496
1497 layer
1498 .included_sub_ranges
1499 .map(|sub_ranges| {
1500 sub_ranges.iter().any(|sub_range| {
1501 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1502 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1503 !is_before_start && !is_after_end
1504 })
1505 })
1506 .unwrap_or(true)
1507 })
1508 .last()
1509 .map(|info| info.language.clone())
1510 .or_else(|| self.language.clone())
1511 }
1512
1513 /// Returns each [`Language`] for the active syntax layers at the given location.
1514 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1515 let offset = position.to_offset(self);
1516 let mut languages: Vec<Arc<Language>> = self
1517 .syntax_map
1518 .lock()
1519 .layers_for_range(offset..offset, &self.text, false)
1520 .map(|info| info.language.clone())
1521 .collect();
1522
1523 if languages.is_empty()
1524 && let Some(buffer_language) = self.language()
1525 {
1526 languages.push(buffer_language.clone());
1527 }
1528
1529 languages
1530 }
1531
1532 /// An integer version number that accounts for all updates besides
1533 /// the buffer's text itself (which is versioned via a version vector).
1534 pub fn non_text_state_update_count(&self) -> usize {
1535 self.non_text_state_update_count
1536 }
1537
1538 /// Whether the buffer is being parsed in the background.
1539 #[cfg(any(test, feature = "test-support"))]
1540 pub fn is_parsing(&self) -> bool {
1541 self.reparse.is_some()
1542 }
1543
1544 /// Indicates whether the buffer contains any regions that may be
1545 /// written in a language that hasn't been loaded yet.
1546 pub fn contains_unknown_injections(&self) -> bool {
1547 self.syntax_map.lock().contains_unknown_injections()
1548 }
1549
1550 #[cfg(any(test, feature = "test-support"))]
1551 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1552 self.sync_parse_timeout = timeout;
1553 }
1554
1555 /// Called after an edit to synchronize the buffer's main parse tree with
1556 /// the buffer's new underlying state.
1557 ///
1558 /// Locks the syntax map and interpolates the edits since the last reparse
1559 /// into the foreground syntax tree.
1560 ///
1561 /// Then takes a stable snapshot of the syntax map before unlocking it.
1562 /// The snapshot with the interpolated edits is sent to a background thread,
1563 /// where we ask Tree-sitter to perform an incremental parse.
1564 ///
1565 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1566 /// waiting on the parse to complete. As soon as it completes, we proceed
1567 /// synchronously, unless a 1ms timeout elapses.
1568 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and replace the foreground parse state with the result.
1573 ///
1574 /// If the buffer or grammar changed since the start of the background parse,
1575 /// initiate an additional reparse recursively. To avoid concurrent parses
1576 /// for the same buffer, we only initiate a new parse if we are not already
1577 /// parsing in the background.
1578 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1579 if self.reparse.is_some() {
1580 return;
1581 }
1582 let language = if let Some(language) = self.language.clone() {
1583 language
1584 } else {
1585 return;
1586 };
1587
1588 let text = self.text_snapshot();
1589 let parsed_version = self.version();
1590
1591 let mut syntax_map = self.syntax_map.lock();
1592 syntax_map.interpolate(&text);
1593 let language_registry = syntax_map.language_registry();
1594 let mut syntax_snapshot = syntax_map.snapshot();
1595 drop(syntax_map);
1596
1597 let parse_task = cx.background_spawn({
1598 let language = language.clone();
1599 let language_registry = language_registry.clone();
1600 async move {
1601 syntax_snapshot.reparse(&text, language_registry, language);
1602 syntax_snapshot
1603 }
1604 });
1605
1606 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1607 match cx
1608 .background_executor()
1609 .block_with_timeout(self.sync_parse_timeout, parse_task)
1610 {
1611 Ok(new_syntax_snapshot) => {
1612 self.did_finish_parsing(new_syntax_snapshot, cx);
1613 self.reparse = None;
1614 }
1615 Err(parse_task) => {
1616 // todo(lw): hot foreground spawn
1617 self.reparse = Some(cx.spawn(async move |this, cx| {
1618 let new_syntax_map = cx.background_spawn(parse_task).await;
1619 this.update(cx, move |this, cx| {
1620 let grammar_changed = || {
1621 this.language.as_ref().is_none_or(|current_language| {
1622 !Arc::ptr_eq(&language, current_language)
1623 })
1624 };
1625 let language_registry_changed = || {
1626 new_syntax_map.contains_unknown_injections()
1627 && language_registry.is_some_and(|registry| {
1628 registry.version() != new_syntax_map.language_registry_version()
1629 })
1630 };
1631 let parse_again = this.version.changed_since(&parsed_version)
1632 || language_registry_changed()
1633 || grammar_changed();
1634 this.did_finish_parsing(new_syntax_map, cx);
1635 this.reparse = None;
1636 if parse_again {
1637 this.reparse(cx);
1638 }
1639 })
1640 .ok();
1641 }));
1642 }
1643 }
1644 }
1645
1646 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1647 self.was_changed();
1648 self.non_text_state_update_count += 1;
1649 self.syntax_map.lock().did_parse(syntax_snapshot);
1650 self.request_autoindent(cx);
1651 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1652 cx.emit(BufferEvent::Reparsed);
1653 cx.notify();
1654 }
1655
1656 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1657 self.parse_status.1.clone()
1658 }
1659
1660 /// Assign to the buffer a set of diagnostics created by a given language server.
1661 pub fn update_diagnostics(
1662 &mut self,
1663 server_id: LanguageServerId,
1664 diagnostics: DiagnosticSet,
1665 cx: &mut Context<Self>,
1666 ) {
1667 let lamport_timestamp = self.text.lamport_clock.tick();
1668 let op = Operation::UpdateDiagnostics {
1669 server_id,
1670 diagnostics: diagnostics.iter().cloned().collect(),
1671 lamport_timestamp,
1672 };
1673
1674 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1675 self.send_operation(op, true, cx);
1676 }
1677
1678 pub fn buffer_diagnostics(
1679 &self,
1680 for_server: Option<LanguageServerId>,
1681 ) -> Vec<&DiagnosticEntry<Anchor>> {
1682 match for_server {
1683 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1684 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1685 Err(_) => Vec::new(),
1686 },
1687 None => self
1688 .diagnostics
1689 .iter()
1690 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1691 .collect(),
1692 }
1693 }
1694
1695 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1696 if let Some(indent_sizes) = self.compute_autoindents() {
1697 let indent_sizes = cx.background_spawn(indent_sizes);
1698 match cx
1699 .background_executor()
1700 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1701 {
1702 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1703 Err(indent_sizes) => {
1704 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1705 let indent_sizes = indent_sizes.await;
1706 this.update(cx, |this, cx| {
1707 this.apply_autoindents(indent_sizes, cx);
1708 })
1709 .ok();
1710 }));
1711 }
1712 }
1713 } else {
1714 self.autoindent_requests.clear();
1715 for tx in self.wait_for_autoindent_txs.drain(..) {
1716 tx.send(()).ok();
1717 }
1718 }
1719 }
1720
1721 fn compute_autoindents(
1722 &self,
1723 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1724 let max_rows_between_yields = 100;
1725 let snapshot = self.snapshot();
1726 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1727 return None;
1728 }
1729
1730 let autoindent_requests = self.autoindent_requests.clone();
1731 Some(async move {
1732 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1733 for request in autoindent_requests {
1734 // Resolve each edited range to its row in the current buffer and in the
1735 // buffer before this batch of edits.
1736 let mut row_ranges = Vec::new();
1737 let mut old_to_new_rows = BTreeMap::new();
1738 let mut language_indent_sizes_by_new_row = Vec::new();
1739 for entry in &request.entries {
1740 let position = entry.range.start;
1741 let new_row = position.to_point(&snapshot).row;
1742 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1743 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1744
1745 if !entry.first_line_is_new {
1746 let old_row = position.to_point(&request.before_edit).row;
1747 old_to_new_rows.insert(old_row, new_row);
1748 }
1749 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1750 }
1751
1752 // Build a map containing the suggested indentation for each of the edited lines
1753 // with respect to the state of the buffer before these edits. This map is keyed
1754 // by the rows for these lines in the current state of the buffer.
1755 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1756 let old_edited_ranges =
1757 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1758 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1759 let mut language_indent_size = IndentSize::default();
1760 for old_edited_range in old_edited_ranges {
1761 let suggestions = request
1762 .before_edit
1763 .suggest_autoindents(old_edited_range.clone())
1764 .into_iter()
1765 .flatten();
1766 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1767 if let Some(suggestion) = suggestion {
1768 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1769
1770 // Find the indent size based on the language for this row.
1771 while let Some((row, size)) = language_indent_sizes.peek() {
1772 if *row > new_row {
1773 break;
1774 }
1775 language_indent_size = *size;
1776 language_indent_sizes.next();
1777 }
1778
1779 let suggested_indent = old_to_new_rows
1780 .get(&suggestion.basis_row)
1781 .and_then(|from_row| {
1782 Some(old_suggestions.get(from_row).copied()?.0)
1783 })
1784 .unwrap_or_else(|| {
1785 request
1786 .before_edit
1787 .indent_size_for_line(suggestion.basis_row)
1788 })
1789 .with_delta(suggestion.delta, language_indent_size);
1790 old_suggestions
1791 .insert(new_row, (suggested_indent, suggestion.within_error));
1792 }
1793 }
1794 yield_now().await;
1795 }
1796
1797 // Compute new suggestions for each line, but only include them in the result
1798 // if they differ from the old suggestion for that line.
1799 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1800 let mut language_indent_size = IndentSize::default();
1801 for (row_range, original_indent_column) in row_ranges {
1802 let new_edited_row_range = if request.is_block_mode {
1803 row_range.start..row_range.start + 1
1804 } else {
1805 row_range.clone()
1806 };
1807
1808 let suggestions = snapshot
1809 .suggest_autoindents(new_edited_row_range.clone())
1810 .into_iter()
1811 .flatten();
1812 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1813 if let Some(suggestion) = suggestion {
1814 // Find the indent size based on the language for this row.
1815 while let Some((row, size)) = language_indent_sizes.peek() {
1816 if *row > new_row {
1817 break;
1818 }
1819 language_indent_size = *size;
1820 language_indent_sizes.next();
1821 }
1822
1823 let suggested_indent = indent_sizes
1824 .get(&suggestion.basis_row)
1825 .copied()
1826 .map(|e| e.0)
1827 .unwrap_or_else(|| {
1828 snapshot.indent_size_for_line(suggestion.basis_row)
1829 })
1830 .with_delta(suggestion.delta, language_indent_size);
1831
1832 if old_suggestions.get(&new_row).is_none_or(
1833 |(old_indentation, was_within_error)| {
1834 suggested_indent != *old_indentation
1835 && (!suggestion.within_error || *was_within_error)
1836 },
1837 ) {
1838 indent_sizes.insert(
1839 new_row,
1840 (suggested_indent, request.ignore_empty_lines),
1841 );
1842 }
1843 }
1844 }
1845
1846 if let (true, Some(original_indent_column)) =
1847 (request.is_block_mode, original_indent_column)
1848 {
1849 let new_indent =
1850 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1851 *indent
1852 } else {
1853 snapshot.indent_size_for_line(row_range.start)
1854 };
1855 let delta = new_indent.len as i64 - original_indent_column as i64;
1856 if delta != 0 {
1857 for row in row_range.skip(1) {
1858 indent_sizes.entry(row).or_insert_with(|| {
1859 let mut size = snapshot.indent_size_for_line(row);
1860 if size.kind == new_indent.kind {
1861 match delta.cmp(&0) {
1862 Ordering::Greater => size.len += delta as u32,
1863 Ordering::Less => {
1864 size.len = size.len.saturating_sub(-delta as u32)
1865 }
1866 Ordering::Equal => {}
1867 }
1868 }
1869 (size, request.ignore_empty_lines)
1870 });
1871 }
1872 }
1873 }
1874
1875 yield_now().await;
1876 }
1877 }
1878
1879 indent_sizes
1880 .into_iter()
1881 .filter_map(|(row, (indent, ignore_empty_lines))| {
1882 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1883 None
1884 } else {
1885 Some((row, indent))
1886 }
1887 })
1888 .collect()
1889 })
1890 }
1891
1892 fn apply_autoindents(
1893 &mut self,
1894 indent_sizes: BTreeMap<u32, IndentSize>,
1895 cx: &mut Context<Self>,
1896 ) {
1897 self.autoindent_requests.clear();
1898 for tx in self.wait_for_autoindent_txs.drain(..) {
1899 tx.send(()).ok();
1900 }
1901
1902 let edits: Vec<_> = indent_sizes
1903 .into_iter()
1904 .filter_map(|(row, indent_size)| {
1905 let current_size = indent_size_for_line(self, row);
1906 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1907 })
1908 .collect();
1909
1910 let preserve_preview = self.preserve_preview();
1911 self.edit(edits, None, cx);
1912 if preserve_preview {
1913 self.refresh_preview();
1914 }
1915 }
1916
1917 /// Create a minimal edit that will cause the given row to be indented
1918 /// with the given size. After applying this edit, the length of the line
1919 /// will always be at least `new_size.len`.
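    ///
    /// An illustrative sketch (not compiled as a doctest) of growing a 2-space
    /// indent on row 3 to 4 spaces; the resulting edit inserts two spaces at
    /// column 0:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```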
1920 pub fn edit_for_indent_size_adjustment(
1921 row: u32,
1922 current_size: IndentSize,
1923 new_size: IndentSize,
1924 ) -> Option<(Range<Point>, String)> {
1925 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1927 Ordering::Greater => {
1928 let point = Point::new(row, 0);
1929 Some((
1930 point..point,
1931 iter::repeat(new_size.char())
1932 .take((new_size.len - current_size.len) as usize)
1933 .collect::<String>(),
1934 ))
1935 }
1936
1937 Ordering::Less => Some((
1938 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1939 String::new(),
1940 )),
1941
1942 Ordering::Equal => None,
1943 }
1944 } else {
1945 Some((
1946 Point::new(row, 0)..Point::new(row, current_size.len),
1947 iter::repeat(new_size.char())
1948 .take(new_size.len as usize)
1949 .collect::<String>(),
1950 ))
1951 }
1952 }
1953
1954 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1955 /// and the given new text.
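    ///
    /// A hedged sketch that mirrors the spawn pattern used elsewhere in this file
    /// (not compiled as a doctest; `new_text` is a placeholder `String`):
    ///
    /// ```ignore
    /// // Inside an update closure or a Buffer method (`cx: &mut Context<Buffer>`):
    /// let diff_task = self.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff_task.await;
    ///     this.update(cx, |this, cx| {
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```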
1956 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1957 let old_text = self.as_rope().clone();
1958 let base_version = self.version();
1959 cx.background_executor()
1960 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1961 let old_text = old_text.to_string();
1962 let line_ending = LineEnding::detect(&new_text);
1963 LineEnding::normalize(&mut new_text);
1964 let edits = text_diff(&old_text, &new_text);
1965 Diff {
1966 base_version,
1967 line_ending,
1968 edits,
1969 }
1970 })
1971 }
1972
1973 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1975 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1976 let old_text = self.as_rope().clone();
1977 let line_ending = self.line_ending();
1978 let base_version = self.version();
1979 cx.background_spawn(async move {
1980 let ranges = trailing_whitespace_ranges(&old_text);
1981 let empty = Arc::<str>::from("");
1982 Diff {
1983 base_version,
1984 line_ending,
1985 edits: ranges
1986 .into_iter()
1987 .map(|range| (range, empty.clone()))
1988 .collect(),
1989 }
1990 })
1991 }
1992
1993 /// Ensures that the buffer ends with a single newline character, and
1994 /// no other whitespace. Skips if the buffer is empty.
1995 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1996 let len = self.len();
1997 if len == 0 {
1998 return;
1999 }
2000 let mut offset = len;
2001 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2002 let non_whitespace_len = chunk
2003 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2004 .len();
2005 offset -= chunk.len();
2006 offset += non_whitespace_len;
2007 if non_whitespace_len != 0 {
2008 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2009 return;
2010 }
2011 break;
2012 }
2013 }
2014 self.edit([(offset..len, "\n")], None, cx);
2015 }
2016
2017 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2018 /// calculated, then adjust the diff to account for those changes, and discard any
2019 /// parts of the diff that conflict with those changes.
2020 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2021 let snapshot = self.snapshot();
2022 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2023 let mut delta = 0;
2024 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2025 while let Some(edit_since) = edits_since.peek() {
2026 // If the edit occurs after a diff hunk, then it does not
2027 // affect that hunk.
2028 if edit_since.old.start > range.end {
2029 break;
2030 }
2031 // If the edit precedes the diff hunk, then adjust the hunk
2032 // to reflect the edit.
2033 else if edit_since.old.end < range.start {
2034 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2035 edits_since.next();
2036 }
2037 // If the edit intersects a diff hunk, then discard that hunk.
2038 else {
2039 return None;
2040 }
2041 }
2042
2043 let start = (range.start as i64 + delta) as usize;
2044 let end = (range.end as i64 + delta) as usize;
2045 Some((start..end, new_text))
2046 });
2047
2048 self.start_transaction();
2049 self.text.set_line_ending(diff.line_ending);
2050 self.edit(adjusted_edits, None, cx);
2051 self.end_transaction(cx)
2052 }
2053
2054 pub fn has_unsaved_edits(&self) -> bool {
2055 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2056
2057 if last_version == self.version {
2058 self.has_unsaved_edits
2059 .set((last_version, has_unsaved_edits));
2060 return has_unsaved_edits;
2061 }
2062
2063 let has_edits = self.has_edits_since(&self.saved_version);
2064 self.has_unsaved_edits
2065 .set((self.version.clone(), has_edits));
2066 has_edits
2067 }
2068
2069 /// Checks if the buffer has unsaved changes.
2070 pub fn is_dirty(&self) -> bool {
2071 if self.capability == Capability::ReadOnly {
2072 return false;
2073 }
2074 if self.has_conflict {
2075 return true;
2076 }
2077 match self.file.as_ref().map(|f| f.disk_state()) {
2078 Some(DiskState::New) | Some(DiskState::Deleted) => {
2079 !self.is_empty() && self.has_unsaved_edits()
2080 }
2081 _ => self.has_unsaved_edits(),
2082 }
2083 }
2084
2085 /// Checks if the buffer and its file have both changed since the buffer
2086 /// was last saved or reloaded.
2087 pub fn has_conflict(&self) -> bool {
2088 if self.has_conflict {
2089 return true;
2090 }
2091 let Some(file) = self.file.as_ref() else {
2092 return false;
2093 };
2094 match file.disk_state() {
2095 DiskState::New => false,
2096 DiskState::Present { mtime } => match self.saved_mtime {
2097 Some(saved_mtime) => {
2098 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2099 }
2100 None => true,
2101 },
2102 DiskState::Deleted => false,
2103 }
2104 }
2105
2106 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2107 pub fn subscribe(&mut self) -> Subscription {
2108 self.text.subscribe()
2109 }
2110
2111 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2112 ///
2113 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
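    ///
    /// A minimal sketch of registering a change bit (not compiled as a doctest):
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ... edits happen ...
    /// if changed.get() {
    ///     // The buffer's text changed since the bit was registered.
    /// }
    /// ```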
2115 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2116 if let Err(ix) = self
2117 .change_bits
2118 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2119 {
2120 self.change_bits.insert(ix, bit);
2121 }
2122 }
2123
2124 /// Set the change bit for all "listeners".
2125 fn was_changed(&mut self) {
2126 self.change_bits.retain(|change_bit| {
2127 change_bit
2128 .upgrade()
2129 .inspect(|bit| {
2130 _ = bit.replace(true);
2131 })
2132 .is_some()
2133 });
2134 }
2135
2136 /// Starts a transaction, if one is not already in-progress. When undoing or
2137 /// redoing edits, all of the edits performed within a transaction are undone
2138 /// or redone together.
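    ///
    /// A sketch of grouping two edits into a single undoable transaction
    /// (not compiled as a doctest; assumes a `&mut Context<Buffer>` named `cx`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..buffer.len(), "fn main() {\n")], None, cx);
    /// let end = buffer.len();
    /// buffer.edit([(end..end, "}\n")], None, cx);
    /// buffer.end_transaction(cx); // A single undo now reverts both edits.
    /// ```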
2139 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2140 self.start_transaction_at(Instant::now())
2141 }
2142
2143 /// Starts a transaction, providing the current time. Subsequent transactions
2144 /// that occur within a short period of time will be grouped together. This
2145 /// is controlled by the buffer's undo grouping duration.
2146 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2147 self.transaction_depth += 1;
2148 if self.was_dirty_before_starting_transaction.is_none() {
2149 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2150 }
2151 self.text.start_transaction_at(now)
2152 }
2153
2154 /// Terminates the current transaction, if this is the outermost transaction.
2155 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2156 self.end_transaction_at(Instant::now(), cx)
2157 }
2158
2159 /// Terminates the current transaction, providing the current time. Subsequent transactions
2160 /// that occur within a short period of time will be grouped together. This
2161 /// is controlled by the buffer's undo grouping duration.
2162 pub fn end_transaction_at(
2163 &mut self,
2164 now: Instant,
2165 cx: &mut Context<Self>,
2166 ) -> Option<TransactionId> {
2167 assert!(self.transaction_depth > 0);
2168 self.transaction_depth -= 1;
2169 let was_dirty = if self.transaction_depth == 0 {
2170 self.was_dirty_before_starting_transaction.take().unwrap()
2171 } else {
2172 false
2173 };
2174 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2175 self.did_edit(&start_version, was_dirty, cx);
2176 Some(transaction_id)
2177 } else {
2178 None
2179 }
2180 }
2181
2182 /// Manually add a transaction to the buffer's undo history.
2183 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2184 self.text.push_transaction(transaction, now);
2185 }
2186
2187 /// Differs from `push_transaction` in that it does not clear the redo
2188 /// stack. Intended to be used to create a parent transaction to merge
2189 /// potential child transactions into.
2190 ///
2191 /// The caller is responsible for removing it from the undo history using
2192 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2193 /// are merged into this transaction, the caller is responsible for ensuring
2194 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2195 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
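    ///
    /// A hedged sketch of the intended flow (not compiled as a doctest;
    /// `child_transaction` is a hypothetical `Option<TransactionId>` produced by
    /// later `start_transaction`/`end_transaction` calls):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// // ... perform edits inside start_transaction/end_transaction pairs ...
    /// match child_transaction {
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```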
2198 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2199 self.text.push_empty_transaction(now)
2200 }
2201
2202 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2204 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2205 self.text.finalize_last_transaction()
2206 }
2207
2208 /// Manually group all changes since a given transaction.
2209 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2210 self.text.group_until_transaction(transaction_id);
2211 }
2212
    /// Manually remove a transaction from the buffer's undo history.
2214 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2215 self.text.forget_transaction(transaction_id)
2216 }
2217
    /// Retrieve a transaction from the buffer's undo history.
2219 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2220 self.text.get_transaction(transaction_id)
2221 }
2222
2223 /// Manually merge two transactions in the buffer's undo history.
2224 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2225 self.text.merge_transactions(transaction, destination);
2226 }
2227
2228 /// Waits for the buffer to receive operations with the given timestamps.
2229 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2230 &mut self,
2231 edit_ids: It,
2232 ) -> impl Future<Output = Result<()>> + use<It> {
2233 self.text.wait_for_edits(edit_ids)
2234 }
2235
2236 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2237 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2238 &mut self,
2239 anchors: It,
2240 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2241 self.text.wait_for_anchors(anchors)
2242 }
2243
2244 /// Waits for the buffer to receive operations up to the given version.
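    ///
    /// A hedged sketch (not compiled as a doctest; `remote_version` is a placeholder
    /// `clock::Global` received from a collaborator, and `update` is assumed to be
    /// fallible in this async context):
    ///
    /// ```ignore
    /// let wait = buffer.update(cx, |buffer, _| buffer.wait_for_version(remote_version))?;
    /// wait.await?;
    /// ```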
2245 pub fn wait_for_version(
2246 &mut self,
2247 version: clock::Global,
2248 ) -> impl Future<Output = Result<()>> + use<> {
2249 self.text.wait_for_version(version)
2250 }
2251
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2254 pub fn give_up_waiting(&mut self) {
2255 self.text.give_up_waiting();
2256 }
2257
2258 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2259 let mut rx = None;
2260 if !self.autoindent_requests.is_empty() {
2261 let channel = oneshot::channel();
2262 self.wait_for_autoindent_txs.push(channel.0);
2263 rx = Some(channel.1);
2264 }
2265 rx
2266 }
2267
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2269 pub fn set_active_selections(
2270 &mut self,
2271 selections: Arc<[Selection<Anchor>]>,
2272 line_mode: bool,
2273 cursor_shape: CursorShape,
2274 cx: &mut Context<Self>,
2275 ) {
2276 let lamport_timestamp = self.text.lamport_clock.tick();
2277 self.remote_selections.insert(
2278 self.text.replica_id(),
2279 SelectionSet {
2280 selections: selections.clone(),
2281 lamport_timestamp,
2282 line_mode,
2283 cursor_shape,
2284 },
2285 );
2286 self.send_operation(
2287 Operation::UpdateSelections {
2288 selections,
2289 line_mode,
2290 lamport_timestamp,
2291 cursor_shape,
2292 },
2293 true,
2294 cx,
2295 );
2296 self.non_text_state_update_count += 1;
2297 cx.notify();
2298 }
2299
2300 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2301 /// this replica.
2302 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2303 if self
2304 .remote_selections
2305 .get(&self.text.replica_id())
2306 .is_none_or(|set| !set.selections.is_empty())
2307 {
2308 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2309 }
2310 }
2311
2312 pub fn set_agent_selections(
2313 &mut self,
2314 selections: Arc<[Selection<Anchor>]>,
2315 line_mode: bool,
2316 cursor_shape: CursorShape,
2317 cx: &mut Context<Self>,
2318 ) {
2319 let lamport_timestamp = self.text.lamport_clock.tick();
2320 self.remote_selections.insert(
2321 ReplicaId::AGENT,
2322 SelectionSet {
2323 selections,
2324 lamport_timestamp,
2325 line_mode,
2326 cursor_shape,
2327 },
2328 );
2329 self.non_text_state_update_count += 1;
2330 cx.notify();
2331 }
2332
2333 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2334 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2335 }
2336
2337 /// Replaces the buffer's entire text.
2338 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2339 where
2340 T: Into<Arc<str>>,
2341 {
2342 self.autoindent_requests.clear();
2343 self.edit([(0..self.len(), text)], None, cx)
2344 }
2345
2346 /// Appends the given text to the end of the buffer.
2347 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2348 where
2349 T: Into<Arc<str>>,
2350 {
2351 self.edit([(self.len()..self.len(), text)], None, cx)
2352 }
2353
2354 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2355 /// delete, and a string of text to insert at that location.
2356 ///
2357 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2358 /// request for the edited ranges, which will be processed when the buffer finishes
2359 /// parsing.
2360 ///
2361 /// Parsing takes place at the end of a transaction, and may compute synchronously
2362 /// or asynchronously, depending on the changes.
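    ///
    /// A minimal sketch of a multi-range edit with per-line auto-indentation
    /// (not compiled as a doctest):
    ///
    /// ```ignore
    /// let end = buffer.len();
    /// buffer.edit(
    ///     [
    ///         (0..0, "if condition {\n"),
    ///         (end..end, "\n}"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```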
2363 pub fn edit<I, S, T>(
2364 &mut self,
2365 edits_iter: I,
2366 autoindent_mode: Option<AutoindentMode>,
2367 cx: &mut Context<Self>,
2368 ) -> Option<clock::Lamport>
2369 where
2370 I: IntoIterator<Item = (Range<S>, T)>,
2371 S: ToOffset,
2372 T: Into<Arc<str>>,
2373 {
2374 // Skip invalid edits and coalesce contiguous ones.
2375 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2376
2377 for (range, new_text) in edits_iter {
2378 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2379
2380 if range.start > range.end {
2381 mem::swap(&mut range.start, &mut range.end);
2382 }
2383 let new_text = new_text.into();
2384 if !new_text.is_empty() || !range.is_empty() {
2385 if let Some((prev_range, prev_text)) = edits.last_mut()
2386 && prev_range.end >= range.start
2387 {
2388 prev_range.end = cmp::max(prev_range.end, range.end);
2389 *prev_text = format!("{prev_text}{new_text}").into();
2390 } else {
2391 edits.push((range, new_text));
2392 }
2393 }
2394 }
2395 if edits.is_empty() {
2396 return None;
2397 }
2398
2399 self.start_transaction();
2400 self.pending_autoindent.take();
2401 let autoindent_request = autoindent_mode
2402 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2403
2404 let edit_operation = self
2405 .text
2406 .edit(edits.iter().cloned(), cx.background_executor());
2407 let edit_id = edit_operation.timestamp();
2408
2409 if let Some((before_edit, mode)) = autoindent_request {
2410 let mut delta = 0isize;
2411 let mut previous_setting = None;
2412 let entries: Vec<_> = edits
2413 .into_iter()
2414 .enumerate()
2415 .zip(&edit_operation.as_edit().unwrap().new_text)
2416 .filter(|((_, (range, _)), _)| {
2417 let language = before_edit.language_at(range.start);
2418 let language_id = language.map(|l| l.id());
2419 if let Some((cached_language_id, auto_indent)) = previous_setting
2420 && cached_language_id == language_id
2421 {
2422 auto_indent
2423 } else {
2424 // The auto-indent setting is not present in editorconfigs, hence
2425 // we can avoid passing the file here.
2426 let auto_indent =
2427 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2428 previous_setting = Some((language_id, auto_indent));
2429 auto_indent
2430 }
2431 })
2432 .map(|((ix, (range, _)), new_text)| {
2433 let new_text_length = new_text.len();
2434 let old_start = range.start.to_point(&before_edit);
2435 let new_start = (delta + range.start as isize) as usize;
2436 let range_len = range.end - range.start;
2437 delta += new_text_length as isize - range_len as isize;
2438
2439 // Decide what range of the insertion to auto-indent, and whether
2440 // the first line of the insertion should be considered a newly-inserted line
2441 // or an edit to an existing line.
2442 let mut range_of_insertion_to_indent = 0..new_text_length;
2443 let mut first_line_is_new = true;
2444
2445 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2446 let old_line_end = before_edit.line_len(old_start.row);
2447
2448 if old_start.column > old_line_start {
2449 first_line_is_new = false;
2450 }
2451
2452 if !new_text.contains('\n')
2453 && (old_start.column + (range_len as u32) < old_line_end
2454 || old_line_end == old_line_start)
2455 {
2456 first_line_is_new = false;
2457 }
2458
2459 // When inserting text starting with a newline, avoid auto-indenting the
2460 // previous line.
2461 if new_text.starts_with('\n') {
2462 range_of_insertion_to_indent.start += 1;
2463 first_line_is_new = true;
2464 }
2465
2466 let mut original_indent_column = None;
2467 if let AutoindentMode::Block {
2468 original_indent_columns,
2469 } = &mode
2470 {
2471 original_indent_column = Some(if new_text.starts_with('\n') {
2472 indent_size_for_text(
2473 new_text[range_of_insertion_to_indent.clone()].chars(),
2474 )
2475 .len
2476 } else {
2477 original_indent_columns
2478 .get(ix)
2479 .copied()
2480 .flatten()
2481 .unwrap_or_else(|| {
2482 indent_size_for_text(
2483 new_text[range_of_insertion_to_indent.clone()].chars(),
2484 )
2485 .len
2486 })
2487 });
2488
2489 // Avoid auto-indenting the line after the edit.
2490 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2491 range_of_insertion_to_indent.end -= 1;
2492 }
2493 }
2494
2495 AutoindentRequestEntry {
2496 first_line_is_new,
2497 original_indent_column,
2498 indent_size: before_edit.language_indent_size_at(range.start, cx),
2499 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2500 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2501 }
2502 })
2503 .collect();
2504
2505 if !entries.is_empty() {
2506 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2507 before_edit,
2508 entries,
2509 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2510 ignore_empty_lines: false,
2511 }));
2512 }
2513 }
2514
2515 self.end_transaction(cx);
2516 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2517 Some(edit_id)
2518 }
2519
2520 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2521 self.was_changed();
2522
2523 if self.edits_since::<usize>(old_version).next().is_none() {
2524 return;
2525 }
2526
2527 self.reparse(cx);
2528 cx.emit(BufferEvent::Edited);
2529 if was_dirty != self.is_dirty() {
2530 cx.emit(BufferEvent::DirtyChanged);
2531 }
2532 cx.notify();
2533 }
2534
2535 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2536 where
2537 I: IntoIterator<Item = Range<T>>,
2538 T: ToOffset + Copy,
2539 {
2540 let before_edit = self.snapshot();
2541 let entries = ranges
2542 .into_iter()
2543 .map(|range| AutoindentRequestEntry {
2544 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2545 first_line_is_new: true,
2546 indent_size: before_edit.language_indent_size_at(range.start, cx),
2547 original_indent_column: None,
2548 })
2549 .collect();
2550 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2551 before_edit,
2552 entries,
2553 is_block_mode: false,
2554 ignore_empty_lines: true,
2555 }));
2556 self.request_autoindent(cx);
2557 }
2558
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
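    ///
    /// Sketch (not compiled as a doctest; `cursor` is a hypothetical [`Point`]):
    ///
    /// ```ignore
    /// // Insert an empty line at the cursor, padded with a blank line above it.
    /// let new_line_start = buffer.insert_empty_line(cursor, true, false, cx);
    /// ```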
2561 pub fn insert_empty_line(
2562 &mut self,
2563 position: impl ToPoint,
2564 space_above: bool,
2565 space_below: bool,
2566 cx: &mut Context<Self>,
2567 ) -> Point {
2568 let mut position = position.to_point(self);
2569
2570 self.start_transaction();
2571
2572 self.edit(
2573 [(position..position, "\n")],
2574 Some(AutoindentMode::EachLine),
2575 cx,
2576 );
2577
2578 if position.column > 0 {
2579 position += Point::new(1, 0);
2580 }
2581
2582 if !self.is_line_blank(position.row) {
2583 self.edit(
2584 [(position..position, "\n")],
2585 Some(AutoindentMode::EachLine),
2586 cx,
2587 );
2588 }
2589
2590 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2591 self.edit(
2592 [(position..position, "\n")],
2593 Some(AutoindentMode::EachLine),
2594 cx,
2595 );
2596 position.row += 1;
2597 }
2598
2599 if space_below
2600 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2601 {
2602 self.edit(
2603 [(position..position, "\n")],
2604 Some(AutoindentMode::EachLine),
2605 cx,
2606 );
2607 }
2608
2609 self.end_transaction(cx);
2610
2611 position
2612 }
2613
2614 /// Applies the given remote operations to the buffer.
2615 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2616 self.pending_autoindent.take();
2617 let was_dirty = self.is_dirty();
2618 let old_version = self.version.clone();
2619 let mut deferred_ops = Vec::new();
2620 let buffer_ops = ops
2621 .into_iter()
2622 .filter_map(|op| match op {
2623 Operation::Buffer(op) => Some(op),
2624 _ => {
2625 if self.can_apply_op(&op) {
2626 self.apply_op(op, cx);
2627 } else {
2628 deferred_ops.push(op);
2629 }
2630 None
2631 }
2632 })
2633 .collect::<Vec<_>>();
2634 for operation in buffer_ops.iter() {
2635 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2636 }
2637 self.text
2638 .apply_ops(buffer_ops, Some(cx.background_executor()));
2639 self.deferred_ops.insert(deferred_ops);
2640 self.flush_deferred_ops(cx);
2641 self.did_edit(&old_version, was_dirty, cx);
2642 // Notify independently of whether the buffer was edited as the operations could include a
2643 // selection update.
2644 cx.notify();
2645 }
2646
2647 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2648 let mut deferred_ops = Vec::new();
2649 for op in self.deferred_ops.drain().iter().cloned() {
2650 if self.can_apply_op(&op) {
2651 self.apply_op(op, cx);
2652 } else {
2653 deferred_ops.push(op);
2654 }
2655 }
2656 self.deferred_ops.insert(deferred_ops);
2657 }
2658
2659 pub fn has_deferred_ops(&self) -> bool {
2660 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2661 }
2662
2663 fn can_apply_op(&self, operation: &Operation) -> bool {
2664 match operation {
2665 Operation::Buffer(_) => {
2666 unreachable!("buffer operations should never be applied at this layer")
2667 }
2668 Operation::UpdateDiagnostics {
2669 diagnostics: diagnostic_set,
2670 ..
2671 } => diagnostic_set.iter().all(|diagnostic| {
2672 self.text.can_resolve(&diagnostic.range.start)
2673 && self.text.can_resolve(&diagnostic.range.end)
2674 }),
2675 Operation::UpdateSelections { selections, .. } => selections
2676 .iter()
2677 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2678 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2679 }
2680 }
2681
2682 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2683 match operation {
2684 Operation::Buffer(_) => {
2685 unreachable!("buffer operations should never be applied at this layer")
2686 }
2687 Operation::UpdateDiagnostics {
2688 server_id,
2689 diagnostics: diagnostic_set,
2690 lamport_timestamp,
2691 } => {
2692 let snapshot = self.snapshot();
2693 self.apply_diagnostic_update(
2694 server_id,
2695 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2696 lamport_timestamp,
2697 cx,
2698 );
2699 }
2700 Operation::UpdateSelections {
2701 selections,
2702 lamport_timestamp,
2703 line_mode,
2704 cursor_shape,
2705 } => {
2706 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2707 && set.lamport_timestamp > lamport_timestamp
2708 {
2709 return;
2710 }
2711
2712 self.remote_selections.insert(
2713 lamport_timestamp.replica_id,
2714 SelectionSet {
2715 selections,
2716 lamport_timestamp,
2717 line_mode,
2718 cursor_shape,
2719 },
2720 );
2721 self.text.lamport_clock.observe(lamport_timestamp);
2722 self.non_text_state_update_count += 1;
2723 }
2724 Operation::UpdateCompletionTriggers {
2725 triggers,
2726 lamport_timestamp,
2727 server_id,
2728 } => {
2729 if triggers.is_empty() {
2730 self.completion_triggers_per_language_server
2731 .remove(&server_id);
2732 self.completion_triggers = self
2733 .completion_triggers_per_language_server
2734 .values()
2735 .flat_map(|triggers| triggers.iter().cloned())
2736 .collect();
2737 } else {
2738 self.completion_triggers_per_language_server
2739 .insert(server_id, triggers.iter().cloned().collect());
2740 self.completion_triggers.extend(triggers);
2741 }
2742 self.text.lamport_clock.observe(lamport_timestamp);
2743 }
2744 Operation::UpdateLineEnding {
2745 line_ending,
2746 lamport_timestamp,
2747 } => {
2748 self.text.set_line_ending(line_ending);
2749 self.text.lamport_clock.observe(lamport_timestamp);
2750 }
2751 }
2752 }
2753
2754 fn apply_diagnostic_update(
2755 &mut self,
2756 server_id: LanguageServerId,
2757 diagnostics: DiagnosticSet,
2758 lamport_timestamp: clock::Lamport,
2759 cx: &mut Context<Self>,
2760 ) {
2761 if lamport_timestamp > self.diagnostics_timestamp {
2762 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2763 if diagnostics.is_empty() {
2764 if let Ok(ix) = ix {
2765 self.diagnostics.remove(ix);
2766 }
2767 } else {
2768 match ix {
2769 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2770 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2771 };
2772 }
2773 self.diagnostics_timestamp = lamport_timestamp;
2774 self.non_text_state_update_count += 1;
2775 self.text.lamport_clock.observe(lamport_timestamp);
2776 cx.notify();
2777 cx.emit(BufferEvent::DiagnosticsUpdated);
2778 }
2779 }
2780
2781 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2782 self.was_changed();
2783 cx.emit(BufferEvent::Operation {
2784 operation,
2785 is_local,
2786 });
2787 }
2788
2789 /// Removes the selections for a given peer.
2790 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2791 self.remote_selections.remove(&replica_id);
2792 cx.notify();
2793 }
2794
2795 /// Undoes the most recent transaction.
2796 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2797 let was_dirty = self.is_dirty();
2798 let old_version = self.version.clone();
2799
2800 if let Some((transaction_id, operation)) = self.text.undo() {
2801 self.send_operation(Operation::Buffer(operation), true, cx);
2802 self.did_edit(&old_version, was_dirty, cx);
2803 Some(transaction_id)
2804 } else {
2805 None
2806 }
2807 }
2808
2809 /// Manually undoes a specific transaction in the buffer's undo history.
2810 pub fn undo_transaction(
2811 &mut self,
2812 transaction_id: TransactionId,
2813 cx: &mut Context<Self>,
2814 ) -> bool {
2815 let was_dirty = self.is_dirty();
2816 let old_version = self.version.clone();
2817 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2818 self.send_operation(Operation::Buffer(operation), true, cx);
2819 self.did_edit(&old_version, was_dirty, cx);
2820 true
2821 } else {
2822 false
2823 }
2824 }
2825
2826 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2827 pub fn undo_to_transaction(
2828 &mut self,
2829 transaction_id: TransactionId,
2830 cx: &mut Context<Self>,
2831 ) -> bool {
2832 let was_dirty = self.is_dirty();
2833 let old_version = self.version.clone();
2834
2835 let operations = self.text.undo_to_transaction(transaction_id);
2836 let undone = !operations.is_empty();
2837 for operation in operations {
2838 self.send_operation(Operation::Buffer(operation), true, cx);
2839 }
2840 if undone {
2841 self.did_edit(&old_version, was_dirty, cx)
2842 }
2843 undone
2844 }
2845
2846 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2847 let was_dirty = self.is_dirty();
2848 let operation = self.text.undo_operations(counts);
2849 let old_version = self.version.clone();
2850 self.send_operation(Operation::Buffer(operation), true, cx);
2851 self.did_edit(&old_version, was_dirty, cx);
2852 }
2853
    /// Redoes the most recent transaction.
2855 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2856 let was_dirty = self.is_dirty();
2857 let old_version = self.version.clone();
2858
2859 if let Some((transaction_id, operation)) = self.text.redo() {
2860 self.send_operation(Operation::Buffer(operation), true, cx);
2861 self.did_edit(&old_version, was_dirty, cx);
2862 Some(transaction_id)
2863 } else {
2864 None
2865 }
2866 }
2867
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2869 pub fn redo_to_transaction(
2870 &mut self,
2871 transaction_id: TransactionId,
2872 cx: &mut Context<Self>,
2873 ) -> bool {
2874 let was_dirty = self.is_dirty();
2875 let old_version = self.version.clone();
2876
2877 let operations = self.text.redo_to_transaction(transaction_id);
2878 let redone = !operations.is_empty();
2879 for operation in operations {
2880 self.send_operation(Operation::Buffer(operation), true, cx);
2881 }
2882 if redone {
2883 self.did_edit(&old_version, was_dirty, cx)
2884 }
2885 redone
2886 }
2887
2888 /// Override current completion triggers with the user-provided completion triggers.
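    ///
    /// Sketch (not compiled as a doctest; `server_id` is a placeholder
    /// [`LanguageServerId`]):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```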
2889 pub fn set_completion_triggers(
2890 &mut self,
2891 server_id: LanguageServerId,
2892 triggers: BTreeSet<String>,
2893 cx: &mut Context<Self>,
2894 ) {
2895 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2896 if triggers.is_empty() {
2897 self.completion_triggers_per_language_server
2898 .remove(&server_id);
2899 self.completion_triggers = self
2900 .completion_triggers_per_language_server
2901 .values()
2902 .flat_map(|triggers| triggers.iter().cloned())
2903 .collect();
2904 } else {
2905 self.completion_triggers_per_language_server
2906 .insert(server_id, triggers.clone());
2907 self.completion_triggers.extend(triggers.iter().cloned());
2908 }
2909 self.send_operation(
2910 Operation::UpdateCompletionTriggers {
2911 triggers: triggers.into_iter().collect(),
2912 lamport_timestamp: self.completion_triggers_timestamp,
2913 server_id,
2914 },
2915 true,
2916 cx,
2917 );
2918 cx.notify();
2919 }
2920
2921 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2923 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2924 &self.completion_triggers
2925 }
2926
2927 /// Call this directly after performing edits to prevent the preview tab
2928 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2929 /// to return false until there are additional edits.
2930 pub fn refresh_preview(&mut self) {
2931 self.preview_version = self.version.clone();
2932 }
2933
2934 /// Whether we should preserve the preview status of a tab containing this buffer.
2935 pub fn preserve_preview(&self) -> bool {
2936 !self.has_edits_since(&self.preview_version)
2937 }
2938
2939 pub fn encoding(&self) -> Encoding {
2940 self.encoding
2941 }
2942
    /// Updates the buffer's character encoding and notifies listeners of the change.
2944 pub fn set_encoding(&mut self, encoding: Encoding, cx: &mut Context<Self>) {
2945 self.encoding = encoding;
2946 cx.emit(BufferEvent::EncodingChanged);
2947 }
2948}
2949
2950#[doc(hidden)]
2951#[cfg(any(test, feature = "test-support"))]
2952impl Buffer {
2953 pub fn edit_via_marked_text(
2954 &mut self,
2955 marked_string: &str,
2956 autoindent_mode: Option<AutoindentMode>,
2957 cx: &mut Context<Self>,
2958 ) {
2959 let edits = self.edits_for_marked_text(marked_string);
2960 self.edit(edits, autoindent_mode, cx);
2961 }
2962
2963 pub fn set_group_interval(&mut self, group_interval: Duration) {
2964 self.text.set_group_interval(group_interval);
2965 }
2966
2967 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2968 where
2969 T: rand::Rng,
2970 {
2971 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2972 let mut last_end = None;
2973 for _ in 0..old_range_count {
2974 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2975 break;
2976 }
2977
2978 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2979 let mut range = self.random_byte_range(new_start, rng);
2980 if rng.random_bool(0.2) {
2981 mem::swap(&mut range.start, &mut range.end);
2982 }
2983 last_end = Some(range.end);
2984
2985 let new_text_len = rng.random_range(0..10);
2986 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2987 new_text = new_text.to_uppercase();
2988
2989 edits.push((range, new_text));
2990 }
2991 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2992 self.edit(edits, None, cx);
2993 }
2994
2995 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2996 let was_dirty = self.is_dirty();
2997 let old_version = self.version.clone();
2998
2999 let ops = self.text.randomly_undo_redo(rng);
3000 if !ops.is_empty() {
3001 for op in ops {
3002 self.send_operation(Operation::Buffer(op), true, cx);
3003 self.did_edit(&old_version, was_dirty, cx);
3004 }
3005 }
3006 }
3007}
3008
3009impl EventEmitter<BufferEvent> for Buffer {}
3010
3011impl Deref for Buffer {
3012 type Target = TextBuffer;
3013
3014 fn deref(&self) -> &Self::Target {
3015 &self.text
3016 }
3017}
3018
3019impl BufferSnapshot {
3020 /// Returns [`IndentSize`] for a given line that respects user settings and
3021 /// language preferences.
3022 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3023 indent_size_for_line(self, row)
3024 }
3025
3026 /// Returns [`IndentSize`] for a given position that respects user settings
3027 /// and language preferences.
3028 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3029 let settings = language_settings(
3030 self.language_at(position).map(|l| l.name()),
3031 self.file(),
3032 cx,
3033 );
3034 if settings.hard_tabs {
3035 IndentSize::tab()
3036 } else {
3037 IndentSize::spaces(settings.tab_size.get())
3038 }
3039 }
3040
3041 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3042 /// is passed in as `single_indent_size`.
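    ///
    /// Sketch (not compiled as a doctest): suggest indents for rows 2..5 using a
    /// four-space unit of indentation.
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: {} column(s)", indent.len);
    /// }
    /// ```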
3043 pub fn suggested_indents(
3044 &self,
3045 rows: impl Iterator<Item = u32>,
3046 single_indent_size: IndentSize,
3047 ) -> BTreeMap<u32, IndentSize> {
3048 let mut result = BTreeMap::new();
3049
3050 for row_range in contiguous_ranges(rows, 10) {
3051 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3052 Some(suggestions) => suggestions,
3053 _ => break,
3054 };
3055
3056 for (row, suggestion) in row_range.zip(suggestions) {
3057 let indent_size = if let Some(suggestion) = suggestion {
3058 result
3059 .get(&suggestion.basis_row)
3060 .copied()
3061 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3062 .with_delta(suggestion.delta, single_indent_size)
3063 } else {
3064 self.indent_size_for_line(row)
3065 };
3066
3067 result.insert(row, indent_size);
3068 }
3069 }
3070
3071 result
3072 }
3073
3074 fn suggest_autoindents(
3075 &self,
3076 row_range: Range<u32>,
3077 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3078 let config = &self.language.as_ref()?.config;
3079 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3080
3081 #[derive(Debug, Clone)]
3082 struct StartPosition {
3083 start: Point,
3084 suffix: SharedString,
3085 }
3086
3087 // Find the suggested indentation ranges based on the syntax tree.
3088 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3089 let end = Point::new(row_range.end, 0);
3090 let range = (start..end).to_offset(&self.text);
3091 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3092 Some(&grammar.indents_config.as_ref()?.query)
3093 });
3094 let indent_configs = matches
3095 .grammars()
3096 .iter()
3097 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3098 .collect::<Vec<_>>();
3099
3100 let mut indent_ranges = Vec::<Range<Point>>::new();
3101 let mut start_positions = Vec::<StartPosition>::new();
3102 let mut outdent_positions = Vec::<Point>::new();
3103 while let Some(mat) = matches.peek() {
3104 let mut start: Option<Point> = None;
3105 let mut end: Option<Point> = None;
3106
3107 let config = indent_configs[mat.grammar_index];
3108 for capture in mat.captures {
3109 if capture.index == config.indent_capture_ix {
3110 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3111 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3112 } else if Some(capture.index) == config.start_capture_ix {
3113 start = Some(Point::from_ts_point(capture.node.end_position()));
3114 } else if Some(capture.index) == config.end_capture_ix {
3115 end = Some(Point::from_ts_point(capture.node.start_position()));
3116 } else if Some(capture.index) == config.outdent_capture_ix {
3117 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3118 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3119 start_positions.push(StartPosition {
3120 start: Point::from_ts_point(capture.node.start_position()),
3121 suffix: suffix.clone(),
3122 });
3123 }
3124 }
3125
3126 matches.advance();
3127 if let Some((start, end)) = start.zip(end) {
3128 if start.row == end.row {
3129 continue;
3130 }
3131 let range = start..end;
3132 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3133 Err(ix) => indent_ranges.insert(ix, range),
3134 Ok(ix) => {
3135 let prev_range = &mut indent_ranges[ix];
3136 prev_range.end = prev_range.end.max(range.end);
3137 }
3138 }
3139 }
3140 }
3141
3142 let mut error_ranges = Vec::<Range<Point>>::new();
3143 let mut matches = self
3144 .syntax
3145 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3146 while let Some(mat) = matches.peek() {
3147 let node = mat.captures[0].node;
3148 let start = Point::from_ts_point(node.start_position());
3149 let end = Point::from_ts_point(node.end_position());
3150 let range = start..end;
3151 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3152 Ok(ix) | Err(ix) => ix,
3153 };
3154 let mut end_ix = ix;
3155 while let Some(existing_range) = error_ranges.get(end_ix) {
3156 if existing_range.end < end {
3157 end_ix += 1;
3158 } else {
3159 break;
3160 }
3161 }
3162 error_ranges.splice(ix..end_ix, [range]);
3163 matches.advance();
3164 }
3165
3166 outdent_positions.sort();
3167 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate its end to that position.
3170 if let Some(range_to_truncate) = indent_ranges
3171 .iter_mut()
3172 .filter(|indent_range| indent_range.contains(&outdent_position))
3173 .next_back()
3174 {
3175 range_to_truncate.end = outdent_position;
3176 }
3177 }
3178
3179 start_positions.sort_by_key(|b| b.start);
3180
        // Find the suggested indentation increases and decreases based on regexes.
3182 let mut regex_outdent_map = HashMap::default();
3183 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3184 let mut start_positions_iter = start_positions.iter().peekable();
3185
3186 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3187 self.for_each_line(
3188 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3189 ..Point::new(row_range.end, 0),
3190 |row, line| {
3191 if config
3192 .decrease_indent_pattern
3193 .as_ref()
3194 .is_some_and(|regex| regex.is_match(line))
3195 {
3196 indent_change_rows.push((row, Ordering::Less));
3197 }
3198 if config
3199 .increase_indent_pattern
3200 .as_ref()
3201 .is_some_and(|regex| regex.is_match(line))
3202 {
3203 indent_change_rows.push((row + 1, Ordering::Greater));
3204 }
3205 while let Some(pos) = start_positions_iter.peek() {
3206 if pos.start.row < row {
3207 let pos = start_positions_iter.next().unwrap();
3208 last_seen_suffix
3209 .entry(pos.suffix.to_string())
3210 .or_default()
3211 .push(pos.start);
3212 } else {
3213 break;
3214 }
3215 }
3216 for rule in &config.decrease_indent_patterns {
3217 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3218 let row_start_column = self.indent_size_for_line(row).len;
3219 let basis_row = rule
3220 .valid_after
3221 .iter()
3222 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3223 .flatten()
3224 .filter(|start_point| start_point.column <= row_start_column)
3225 .max_by_key(|start_point| start_point.row);
3226 if let Some(outdent_to_row) = basis_row {
3227 regex_outdent_map.insert(row, outdent_to_row.row);
3228 }
3229 break;
3230 }
3231 }
3232 },
3233 );
3234
3235 let mut indent_changes = indent_change_rows.into_iter().peekable();
3236 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3237 prev_non_blank_row.unwrap_or(0)
3238 } else {
3239 row_range.start.saturating_sub(1)
3240 };
3241
3242 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3243 Some(row_range.map(move |row| {
3244 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3245
3246 let mut indent_from_prev_row = false;
3247 let mut outdent_from_prev_row = false;
3248 let mut outdent_to_row = u32::MAX;
3249 let mut from_regex = false;
3250
3251 while let Some((indent_row, delta)) = indent_changes.peek() {
3252 match indent_row.cmp(&row) {
3253 Ordering::Equal => match delta {
3254 Ordering::Less => {
3255 from_regex = true;
3256 outdent_from_prev_row = true
3257 }
3258 Ordering::Greater => {
3259 indent_from_prev_row = true;
3260 from_regex = true
3261 }
3262 _ => {}
3263 },
3264
3265 Ordering::Greater => break,
3266 Ordering::Less => {}
3267 }
3268
3269 indent_changes.next();
3270 }
3271
3272 for range in &indent_ranges {
3273 if range.start.row >= row {
3274 break;
3275 }
3276 if range.start.row == prev_row && range.end > row_start {
3277 indent_from_prev_row = true;
3278 }
3279 if range.end > prev_row_start && range.end <= row_start {
3280 outdent_to_row = outdent_to_row.min(range.start.row);
3281 }
3282 }
3283
3284 if let Some(basis_row) = regex_outdent_map.get(&row) {
3285 indent_from_prev_row = false;
3286 outdent_to_row = *basis_row;
3287 from_regex = true;
3288 }
3289
3290 let within_error = error_ranges
3291 .iter()
3292 .any(|e| e.start.row < row && e.end > row_start);
3293
3294 let suggestion = if outdent_to_row == prev_row
3295 || (outdent_from_prev_row && indent_from_prev_row)
3296 {
3297 Some(IndentSuggestion {
3298 basis_row: prev_row,
3299 delta: Ordering::Equal,
3300 within_error: within_error && !from_regex,
3301 })
3302 } else if indent_from_prev_row {
3303 Some(IndentSuggestion {
3304 basis_row: prev_row,
3305 delta: Ordering::Greater,
3306 within_error: within_error && !from_regex,
3307 })
3308 } else if outdent_to_row < prev_row {
3309 Some(IndentSuggestion {
3310 basis_row: outdent_to_row,
3311 delta: Ordering::Equal,
3312 within_error: within_error && !from_regex,
3313 })
3314 } else if outdent_from_prev_row {
3315 Some(IndentSuggestion {
3316 basis_row: prev_row,
3317 delta: Ordering::Less,
3318 within_error: within_error && !from_regex,
3319 })
3320 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3321 {
3322 Some(IndentSuggestion {
3323 basis_row: prev_row,
3324 delta: Ordering::Equal,
3325 within_error: within_error && !from_regex,
3326 })
3327 } else {
3328 None
3329 };
3330
3331 prev_row = row;
3332 prev_row_start = row_start;
3333 suggestion
3334 }))
3335 }
3336
3337 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3338 while row > 0 {
3339 row -= 1;
3340 if !self.is_line_blank(row) {
3341 return Some(row);
3342 }
3343 }
3344 None
3345 }
3346
3347 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3348 let captures = self.syntax.captures(range, &self.text, |grammar| {
3349 grammar
3350 .highlights_config
3351 .as_ref()
3352 .map(|config| &config.query)
3353 });
3354 let highlight_maps = captures
3355 .grammars()
3356 .iter()
3357 .map(|grammar| grammar.highlight_map())
3358 .collect();
3359 (captures, highlight_maps)
3360 }
3361
3362 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3363 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3364 /// returned in chunks where each chunk has a single syntax highlighting style and
3365 /// diagnostic status.
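    ///
    /// A hedged sketch (not compiled as a doctest; assumes each yielded chunk
    /// exposes its text via a `text` field):
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     print!("{}", chunk.text);
    /// }
    /// ```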
3366 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3367 let range = range.start.to_offset(self)..range.end.to_offset(self);
3368
3369 let mut syntax = None;
3370 if language_aware {
3371 syntax = Some(self.get_highlights(range.clone()));
3372 }
3373 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3374 let diagnostics = language_aware;
3375 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3376 }
3377
3378 pub fn highlighted_text_for_range<T: ToOffset>(
3379 &self,
3380 range: Range<T>,
3381 override_style: Option<HighlightStyle>,
3382 syntax_theme: &SyntaxTheme,
3383 ) -> HighlightedText {
3384 HighlightedText::from_buffer_range(
3385 range,
3386 &self.text,
3387 &self.syntax,
3388 override_style,
3389 syntax_theme,
3390 )
3391 }
3392
3393 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3395 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3396 let mut line = String::new();
3397 let mut row = range.start.row;
3398 for chunk in self
3399 .as_rope()
3400 .chunks_in_range(range.to_offset(self))
3401 .chain(["\n"])
3402 {
3403 for (newline_ix, text) in chunk.split('\n').enumerate() {
3404 if newline_ix > 0 {
3405 callback(row, &line);
3406 row += 1;
3407 line.clear();
3408 }
3409 line.push_str(text);
3410 }
3411 }
3412 }
3413
3414 /// Iterates over every [`SyntaxLayer`] in the buffer.
3415 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3416 self.syntax_layers_for_range(0..self.len(), true)
3417 }
3418
3419 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3420 let offset = position.to_offset(self);
3421 self.syntax_layers_for_range(offset..offset, false)
3422 .filter(|l| l.node().end_byte() > offset)
3423 .last()
3424 }
3425
3426 pub fn syntax_layers_for_range<D: ToOffset>(
3427 &self,
3428 range: Range<D>,
3429 include_hidden: bool,
3430 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3431 self.syntax
3432 .layers_for_range(range, &self.text, include_hidden)
3433 }
3434
3435 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3436 &self,
3437 range: Range<D>,
3438 ) -> Option<SyntaxLayer<'_>> {
3439 let range = range.to_offset(self);
3440 self.syntax
3441 .layers_for_range(range, &self.text, false)
3442 .max_by(|a, b| {
3443 if a.depth != b.depth {
3444 a.depth.cmp(&b.depth)
3445 } else if a.offset.0 != b.offset.0 {
3446 a.offset.0.cmp(&b.offset.0)
3447 } else {
3448 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3449 }
3450 })
3451 }
3452
3453 /// Returns the main [`Language`].
3454 pub fn language(&self) -> Option<&Arc<Language>> {
3455 self.language.as_ref()
3456 }
3457
3458 /// Returns the [`Language`] at the given location.
3459 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3460 self.syntax_layer_at(position)
3461 .map(|info| info.language)
3462 .or(self.language.as_ref())
3463 }
3464
3465 /// Returns the settings for the language at the given location.
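    ///
    /// Sketch (not compiled as a doctest; `cursor_offset` is a placeholder offset):
    ///
    /// ```ignore
    /// let settings = snapshot.settings_at(cursor_offset, cx);
    /// let tab_size = settings.tab_size.get();
    /// ```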
3466 pub fn settings_at<'a, D: ToOffset>(
3467 &'a self,
3468 position: D,
3469 cx: &'a App,
3470 ) -> Cow<'a, LanguageSettings> {
3471 language_settings(
3472 self.language_at(position).map(|l| l.name()),
3473 self.file.as_ref(),
3474 cx,
3475 )
3476 }
3477
3478 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3479 CharClassifier::new(self.language_scope_at(point))
3480 }
3481
3482 /// Returns the [`LanguageScope`] at the given location.
3483 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3484 let offset = position.to_offset(self);
3485 let mut scope = None;
3486 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3487
3488 // Use the layer that has the smallest node intersecting the given point.
3489 for layer in self
3490 .syntax
3491 .layers_for_range(offset..offset, &self.text, false)
3492 {
3493 let mut cursor = layer.node().walk();
3494
3495 let mut range = None;
3496 loop {
3497 let child_range = cursor.node().byte_range();
3498 if !child_range.contains(&offset) {
3499 break;
3500 }
3501
3502 range = Some(child_range);
3503 if cursor.goto_first_child_for_byte(offset).is_none() {
3504 break;
3505 }
3506 }
3507
3508 if let Some(range) = range
3509 && smallest_range_and_depth.as_ref().is_none_or(
3510 |(smallest_range, smallest_range_depth)| {
3511 if layer.depth > *smallest_range_depth {
3512 true
3513 } else if layer.depth == *smallest_range_depth {
3514 range.len() < smallest_range.len()
3515 } else {
3516 false
3517 }
3518 },
3519 )
3520 {
3521 smallest_range_and_depth = Some((range, layer.depth));
3522 scope = Some(LanguageScope {
3523 language: layer.language.clone(),
3524 override_id: layer.override_id(offset, &self.text),
3525 });
3526 }
3527 }
3528
3529 scope.or_else(|| {
3530 self.language.clone().map(|language| LanguageScope {
3531 language,
3532 override_id: None,
3533 })
3534 })
3535 }
3536
3537 /// Returns a tuple of the range and character kind of the word
3538 /// surrounding the given position.
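    ///
    /// For example (illustrative), with buffer text `let foo_bar = 1;` and a
    /// position inside `foo_bar`, this returns the byte range covering
    /// `foo_bar` together with `Some(CharKind::Word)`.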
3539 pub fn surrounding_word<T: ToOffset>(
3540 &self,
3541 start: T,
3542 scope_context: Option<CharScopeContext>,
3543 ) -> (Range<usize>, Option<CharKind>) {
3544 let mut start = start.to_offset(self);
3545 let mut end = start;
3546 let mut next_chars = self.chars_at(start).take(128).peekable();
3547 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3548
3549 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3550 let word_kind = cmp::max(
3551 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3552 next_chars.peek().copied().map(|c| classifier.kind(c)),
3553 );
3554
3555 for ch in prev_chars {
3556 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3557 start -= ch.len_utf8();
3558 } else {
3559 break;
3560 }
3561 }
3562
3563 for ch in next_chars {
3564 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3565 end += ch.len_utf8();
3566 } else {
3567 break;
3568 }
3569 }
3570
3571 (start..end, word_kind)
3572 }
3573
3574 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3575 /// range. When `require_larger` is true, the node found must be larger than the query range.
3576 ///
3577 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3578 /// be moved to the root of the tree.
3579 fn goto_node_enclosing_range(
3580 cursor: &mut tree_sitter::TreeCursor,
3581 query_range: &Range<usize>,
3582 require_larger: bool,
3583 ) -> bool {
3584 let mut ascending = false;
3585 loop {
3586 let mut range = cursor.node().byte_range();
3587 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3590 if range.start > query_range.start {
3591 cursor.goto_previous_sibling();
3592 range = cursor.node().byte_range();
3593 }
3594 } else {
3595 // When the query range is non-empty and the current node ends exactly at the start,
3596 // move to the next sibling to find a node that extends beyond the start.
3597 if range.end == query_range.start {
3598 cursor.goto_next_sibling();
3599 range = cursor.node().byte_range();
3600 }
3601 }
3602
3603 let encloses = range.contains_inclusive(query_range)
3604 && (!require_larger || range.len() > query_range.len());
3605 if !encloses {
3606 ascending = true;
3607 if !cursor.goto_parent() {
3608 return false;
3609 }
3610 continue;
3611 } else if ascending {
3612 return true;
3613 }
3614
3615 // Descend into the current node.
3616 if cursor
3617 .goto_first_child_for_byte(query_range.start)
3618 .is_none()
3619 {
3620 return true;
3621 }
3622 }
3623 }
3624
3625 pub fn syntax_ancestor<'a, T: ToOffset>(
3626 &'a self,
3627 range: Range<T>,
3628 ) -> Option<tree_sitter::Node<'a>> {
3629 let range = range.start.to_offset(self)..range.end.to_offset(self);
3630 let mut result: Option<tree_sitter::Node<'a>> = None;
3631 for layer in self
3632 .syntax
3633 .layers_for_range(range.clone(), &self.text, true)
3634 {
3635 let mut cursor = layer.node().walk();
3636
3637 // Find the node that both contains the range and is larger than it.
3638 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3639 continue;
3640 }
3641
3642 let left_node = cursor.node();
3643 let mut layer_result = left_node;
3644
3645 // For an empty range, try to find another node immediately to the right of the range.
3646 if left_node.end_byte() == range.start {
3647 let mut right_node = None;
3648 while !cursor.goto_next_sibling() {
3649 if !cursor.goto_parent() {
3650 break;
3651 }
3652 }
3653
3654 while cursor.node().start_byte() == range.start {
3655 right_node = Some(cursor.node());
3656 if !cursor.goto_first_child() {
3657 break;
3658 }
3659 }
3660
3661 // If there is a candidate node on both sides of the (empty) range, then
3662 // decide between the two by favoring a named node over an anonymous token.
3663 // If both nodes are the same in that regard, favor the right one.
3664 if let Some(right_node) = right_node
3665 && (right_node.is_named() || !left_node.is_named())
3666 {
3667 layer_result = right_node;
3668 }
3669 }
3670
3671 if let Some(previous_result) = &result
3672 && previous_result.byte_range().len() < layer_result.byte_range().len()
3673 {
3674 continue;
3675 }
3676 result = Some(layer_result);
3677 }
3678
3679 result
3680 }
3681
3682 /// Find the previous sibling syntax node at the given range.
3683 ///
3684 /// This function locates the syntax node that precedes the node containing
3685 /// the given range. It searches hierarchically by:
3686 /// 1. Finding the node that contains the given range
3687 /// 2. Looking for the previous sibling at the same tree level
3688 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3689 ///
3690 /// Returns `None` if there is no previous sibling at any ancestor level.
3691 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3692 &'a self,
3693 range: Range<T>,
3694 ) -> Option<tree_sitter::Node<'a>> {
3695 let range = range.start.to_offset(self)..range.end.to_offset(self);
3696 let mut result: Option<tree_sitter::Node<'a>> = None;
3697
3698 for layer in self
3699 .syntax
3700 .layers_for_range(range.clone(), &self.text, true)
3701 {
3702 let mut cursor = layer.node().walk();
3703
3704 // Find the node that contains the range
3705 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3706 continue;
3707 }
3708
3709 // Look for the previous sibling, moving up ancestor levels if needed
3710 loop {
3711 if cursor.goto_previous_sibling() {
3712 let layer_result = cursor.node();
3713
3714 if let Some(previous_result) = &result {
3715 if previous_result.byte_range().end < layer_result.byte_range().end {
3716 continue;
3717 }
3718 }
3719 result = Some(layer_result);
3720 break;
3721 }
3722
3723 // No sibling found at this level, try moving up to parent
3724 if !cursor.goto_parent() {
3725 break;
3726 }
3727 }
3728 }
3729
3730 result
3731 }
3732
3733 /// Find the next sibling syntax node at the given range.
3734 ///
3735 /// This function locates the syntax node that follows the node containing
3736 /// the given range. It searches hierarchically by:
3737 /// 1. Finding the node that contains the given range
3738 /// 2. Looking for the next sibling at the same tree level
3739 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3740 ///
3741 /// Returns `None` if there is no next sibling at any ancestor level.
3742 pub fn syntax_next_sibling<'a, T: ToOffset>(
3743 &'a self,
3744 range: Range<T>,
3745 ) -> Option<tree_sitter::Node<'a>> {
3746 let range = range.start.to_offset(self)..range.end.to_offset(self);
3747 let mut result: Option<tree_sitter::Node<'a>> = None;
3748
3749 for layer in self
3750 .syntax
3751 .layers_for_range(range.clone(), &self.text, true)
3752 {
3753 let mut cursor = layer.node().walk();
3754
3755 // Find the node that contains the range
3756 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3757 continue;
3758 }
3759
3760 // Look for the next sibling, moving up ancestor levels if needed
3761 loop {
3762 if cursor.goto_next_sibling() {
3763 let layer_result = cursor.node();
3764
3765 if let Some(previous_result) = &result {
3766 if previous_result.byte_range().start > layer_result.byte_range().start {
3767 continue;
3768 }
3769 }
3770 result = Some(layer_result);
3771 break;
3772 }
3773
3774 // No sibling found at this level, try moving up to parent
3775 if !cursor.goto_parent() {
3776 break;
3777 }
3778 }
3779 }
3780
3781 result
3782 }
3783
3784 /// Returns the root syntax node within the given row
3785 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3786 let start_offset = position.to_offset(self);
3787
3788 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3789
3790 let layer = self
3791 .syntax
3792 .layers_for_range(start_offset..start_offset, &self.text, true)
3793 .next()?;
3794
3795 let mut cursor = layer.node().walk();
3796
3797 // Descend to the first leaf that touches the start of the range.
3798 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3799 if cursor.node().end_byte() == start_offset {
3800 cursor.goto_next_sibling();
3801 }
3802 }
3803
3804 // Ascend to the root node within the same row.
3805 while cursor.goto_parent() {
3806 if cursor.node().start_position().row != row {
3807 break;
3808 }
3809 }
3810
3811 Some(cursor.node())
3812 }
3813
3814 /// Returns the outline for the buffer.
3815 ///
3816 /// This method allows passing an optional [`SyntaxTheme`] to
3817 /// syntax-highlight the returned symbols.
3818 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3819 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3820 }
3821
3822 /// Returns all the symbols that contain the given position.
3823 ///
3824 /// This method allows passing an optional [`SyntaxTheme`] to
3825 /// syntax-highlight the returned symbols.
3826 pub fn symbols_containing<T: ToOffset>(
3827 &self,
3828 position: T,
3829 theme: Option<&SyntaxTheme>,
3830 ) -> Vec<OutlineItem<Anchor>> {
3831 let position = position.to_offset(self);
3832 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3833 let end = self.clip_offset(position + 1, Bias::Right);
3834 let mut items = self.outline_items_containing(start..end, false, theme);
3835 let mut prev_depth = None;
3836 items.retain(|item| {
3837 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3838 prev_depth = Some(item.depth);
3839 result
3840 });
3841 items
3842 }
3843
3844 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3845 let range = range.to_offset(self);
3846 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3847 grammar.outline_config.as_ref().map(|c| &c.query)
3848 });
3849 let configs = matches
3850 .grammars()
3851 .iter()
3852 .map(|g| g.outline_config.as_ref().unwrap())
3853 .collect::<Vec<_>>();
3854
3855 while let Some(mat) = matches.peek() {
3856 let config = &configs[mat.grammar_index];
3857 let containing_item_node = maybe!({
3858 let item_node = mat.captures.iter().find_map(|cap| {
3859 if cap.index == config.item_capture_ix {
3860 Some(cap.node)
3861 } else {
3862 None
3863 }
3864 })?;
3865
3866 let item_byte_range = item_node.byte_range();
3867 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3868 None
3869 } else {
3870 Some(item_node)
3871 }
3872 });
3873
3874 if let Some(item_node) = containing_item_node {
3875 return Some(
3876 Point::from_ts_point(item_node.start_position())
3877 ..Point::from_ts_point(item_node.end_position()),
3878 );
3879 }
3880
3881 matches.advance();
3882 }
3883 None
3884 }
3885
3886 pub fn outline_items_containing<T: ToOffset>(
3887 &self,
3888 range: Range<T>,
3889 include_extra_context: bool,
3890 theme: Option<&SyntaxTheme>,
3891 ) -> Vec<OutlineItem<Anchor>> {
3892 self.outline_items_containing_internal(
3893 range,
3894 include_extra_context,
3895 theme,
3896 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3897 )
3898 }
3899
3900 pub fn outline_items_as_points_containing<T: ToOffset>(
3901 &self,
3902 range: Range<T>,
3903 include_extra_context: bool,
3904 theme: Option<&SyntaxTheme>,
3905 ) -> Vec<OutlineItem<Point>> {
3906 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3907 range
3908 })
3909 }
3910
3911 fn outline_items_containing_internal<T: ToOffset, U>(
3912 &self,
3913 range: Range<T>,
3914 include_extra_context: bool,
3915 theme: Option<&SyntaxTheme>,
3916 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3917 ) -> Vec<OutlineItem<U>> {
3918 let range = range.to_offset(self);
3919 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3920 grammar.outline_config.as_ref().map(|c| &c.query)
3921 });
3922
3923 let mut items = Vec::new();
3924 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3925 while let Some(mat) = matches.peek() {
3926 let config = matches.grammars()[mat.grammar_index]
3927 .outline_config
3928 .as_ref()
3929 .unwrap();
3930 if let Some(item) =
3931 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3932 {
3933 items.push(item);
3934 } else if let Some(capture) = mat
3935 .captures
3936 .iter()
3937 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3938 {
3939 let capture_range = capture.node.start_position()..capture.node.end_position();
3940 let mut capture_row_range =
3941 capture_range.start.row as u32..capture_range.end.row as u32;
3942 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3943 {
3944 capture_row_range.end -= 1;
3945 }
3946 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3947 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3948 last_row_range.end = capture_row_range.end;
3949 } else {
3950 annotation_row_ranges.push(capture_row_range);
3951 }
3952 } else {
3953 annotation_row_ranges.push(capture_row_range);
3954 }
3955 }
3956 matches.advance();
3957 }
3958
3959 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3960
        // Assign depths based on containment relationships and convert the ranges via the callback.
3962 let mut item_ends_stack = Vec::<Point>::new();
3963 let mut anchor_items = Vec::new();
3964 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3965 for item in items {
3966 while let Some(last_end) = item_ends_stack.last().copied() {
3967 if last_end < item.range.end {
3968 item_ends_stack.pop();
3969 } else {
3970 break;
3971 }
3972 }
3973
3974 let mut annotation_row_range = None;
3975 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3976 let row_preceding_item = item.range.start.row.saturating_sub(1);
3977 if next_annotation_row_range.end < row_preceding_item {
3978 annotation_row_ranges.next();
3979 } else {
3980 if next_annotation_row_range.end == row_preceding_item {
3981 annotation_row_range = Some(next_annotation_row_range.clone());
3982 annotation_row_ranges.next();
3983 }
3984 break;
3985 }
3986 }
3987
3988 anchor_items.push(OutlineItem {
3989 depth: item_ends_stack.len(),
3990 range: range_callback(self, item.range.clone()),
3991 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3992 text: item.text,
3993 highlight_ranges: item.highlight_ranges,
3994 name_ranges: item.name_ranges,
3995 body_range: item.body_range.map(|r| range_callback(self, r)),
3996 annotation_range: annotation_row_range.map(|annotation_range| {
3997 let point_range = Point::new(annotation_range.start, 0)
3998 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3999 range_callback(self, point_range)
4000 }),
4001 });
4002 item_ends_stack.push(item.range.end);
4003 }
4004
4005 anchor_items
4006 }
4007
4008 fn next_outline_item(
4009 &self,
4010 config: &OutlineConfig,
4011 mat: &SyntaxMapMatch,
4012 range: &Range<usize>,
4013 include_extra_context: bool,
4014 theme: Option<&SyntaxTheme>,
4015 ) -> Option<OutlineItem<Point>> {
4016 let item_node = mat.captures.iter().find_map(|cap| {
4017 if cap.index == config.item_capture_ix {
4018 Some(cap.node)
4019 } else {
4020 None
4021 }
4022 })?;
4023
4024 let item_byte_range = item_node.byte_range();
4025 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4026 return None;
4027 }
4028 let item_point_range = Point::from_ts_point(item_node.start_position())
4029 ..Point::from_ts_point(item_node.end_position());
4030
4031 let mut open_point = None;
4032 let mut close_point = None;
4033
4034 let mut buffer_ranges = Vec::new();
4035 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4036 let mut range = node.start_byte()..node.end_byte();
4037 let start = node.start_position();
4038 if node.end_position().row > start.row {
4039 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4040 }
4041
4042 if !range.is_empty() {
4043 buffer_ranges.push((range, node_is_name));
4044 }
4045 };
4046
4047 for capture in mat.captures {
4048 if capture.index == config.name_capture_ix {
4049 add_to_buffer_ranges(capture.node, true);
4050 } else if Some(capture.index) == config.context_capture_ix
4051 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4052 {
4053 add_to_buffer_ranges(capture.node, false);
4054 } else {
4055 if Some(capture.index) == config.open_capture_ix {
4056 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4057 } else if Some(capture.index) == config.close_capture_ix {
4058 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4059 }
4060 }
4061 }
4062
4063 if buffer_ranges.is_empty() {
4064 return None;
4065 }
4066 let source_range_for_text =
4067 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4068
4069 let mut text = String::new();
4070 let mut highlight_ranges = Vec::new();
4071 let mut name_ranges = Vec::new();
4072 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4073 let mut last_buffer_range_end = 0;
4074 for (buffer_range, is_name) in buffer_ranges {
4075 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4076 if space_added {
4077 text.push(' ');
4078 }
4079 let before_append_len = text.len();
4080 let mut offset = buffer_range.start;
4081 chunks.seek(buffer_range.clone());
4082 for mut chunk in chunks.by_ref() {
4083 if chunk.text.len() > buffer_range.end - offset {
4084 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4085 offset = buffer_range.end;
4086 } else {
4087 offset += chunk.text.len();
4088 }
4089 let style = chunk
4090 .syntax_highlight_id
4091 .zip(theme)
4092 .and_then(|(highlight, theme)| highlight.style(theme));
4093 if let Some(style) = style {
4094 let start = text.len();
4095 let end = start + chunk.text.len();
4096 highlight_ranges.push((start..end, style));
4097 }
4098 text.push_str(chunk.text);
4099 if offset >= buffer_range.end {
4100 break;
4101 }
4102 }
4103 if is_name {
4104 let after_append_len = text.len();
4105 let start = if space_added && !name_ranges.is_empty() {
4106 before_append_len - 1
4107 } else {
4108 before_append_len
4109 };
4110 name_ranges.push(start..after_append_len);
4111 }
4112 last_buffer_range_end = buffer_range.end;
4113 }
4114
4115 Some(OutlineItem {
4116 depth: 0, // We'll calculate the depth later
4117 range: item_point_range,
4118 source_range_for_text: source_range_for_text.to_point(self),
4119 text,
4120 highlight_ranges,
4121 name_ranges,
4122 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4123 annotation_range: None,
4124 })
4125 }
4126
4127 pub fn function_body_fold_ranges<T: ToOffset>(
4128 &self,
4129 within: Range<T>,
4130 ) -> impl Iterator<Item = Range<usize>> + '_ {
4131 self.text_object_ranges(within, TreeSitterOptions::default())
4132 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4133 }
4134
4135 /// For each grammar in the language, runs the provided
4136 /// [`tree_sitter::Query`] against the given range.
4137 pub fn matches(
4138 &self,
4139 range: Range<usize>,
4140 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4141 ) -> SyntaxMapMatches<'_> {
4142 self.syntax.matches(range, self, query)
4143 }
4144
4145 pub fn all_bracket_ranges(
4146 &self,
4147 range: Range<usize>,
4148 ) -> impl Iterator<Item = BracketMatch> + '_ {
4149 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4150 grammar.brackets_config.as_ref().map(|c| &c.query)
4151 });
4152 let configs = matches
4153 .grammars()
4154 .iter()
4155 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4156 .collect::<Vec<_>>();
4157
4158 iter::from_fn(move || {
4159 while let Some(mat) = matches.peek() {
4160 let mut open = None;
4161 let mut close = None;
4162 let config = &configs[mat.grammar_index];
4163 let pattern = &config.patterns[mat.pattern_index];
4164 for capture in mat.captures {
4165 if capture.index == config.open_capture_ix {
4166 open = Some(capture.node.byte_range());
4167 } else if capture.index == config.close_capture_ix {
4168 close = Some(capture.node.byte_range());
4169 }
4170 }
4171
4172 matches.advance();
4173
4174 let Some((open_range, close_range)) = open.zip(close) else {
4175 continue;
4176 };
4177
4178 let bracket_range = open_range.start..=close_range.end;
4179 if !bracket_range.overlaps(&range) {
4180 continue;
4181 }
4182
4183 return Some(BracketMatch {
4184 open_range,
4185 close_range,
4186 newline_only: pattern.newline_only,
4187 });
4188 }
4189 None
4190 })
4191 }
4192
4193 /// Returns bracket range pairs overlapping or adjacent to `range`
4194 pub fn bracket_ranges<T: ToOffset>(
4195 &self,
4196 range: Range<T>,
4197 ) -> impl Iterator<Item = BracketMatch> + '_ {
4198 // Find bracket pairs that *inclusively* contain the given range.
4199 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4200 self.all_bracket_ranges(range)
4201 .filter(|pair| !pair.newline_only)
4202 }
4203
4204 pub fn debug_variables_query<T: ToOffset>(
4205 &self,
4206 range: Range<T>,
4207 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4208 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4209
4210 let mut matches = self.syntax.matches_with_options(
4211 range.clone(),
4212 &self.text,
4213 TreeSitterOptions::default(),
4214 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4215 );
4216
4217 let configs = matches
4218 .grammars()
4219 .iter()
4220 .map(|grammar| grammar.debug_variables_config.as_ref())
4221 .collect::<Vec<_>>();
4222
4223 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4224
4225 iter::from_fn(move || {
4226 loop {
4227 while let Some(capture) = captures.pop() {
4228 if capture.0.overlaps(&range) {
4229 return Some(capture);
4230 }
4231 }
4232
4233 let mat = matches.peek()?;
4234
4235 let Some(config) = configs[mat.grammar_index].as_ref() else {
4236 matches.advance();
4237 continue;
4238 };
4239
4240 for capture in mat.captures {
4241 let Some(ix) = config
4242 .objects_by_capture_ix
4243 .binary_search_by_key(&capture.index, |e| e.0)
4244 .ok()
4245 else {
4246 continue;
4247 };
4248 let text_object = config.objects_by_capture_ix[ix].1;
4249 let byte_range = capture.node.byte_range();
4250
4251 let mut found = false;
4252 for (range, existing) in captures.iter_mut() {
4253 if existing == &text_object {
4254 range.start = range.start.min(byte_range.start);
4255 range.end = range.end.max(byte_range.end);
4256 found = true;
4257 break;
4258 }
4259 }
4260
4261 if !found {
4262 captures.push((byte_range, text_object));
4263 }
4264 }
4265
4266 matches.advance();
4267 }
4268 })
4269 }
4270
4271 pub fn text_object_ranges<T: ToOffset>(
4272 &self,
4273 range: Range<T>,
4274 options: TreeSitterOptions,
4275 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4276 let range =
4277 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4278
4279 let mut matches =
4280 self.syntax
4281 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4282 grammar.text_object_config.as_ref().map(|c| &c.query)
4283 });
4284
4285 let configs = matches
4286 .grammars()
4287 .iter()
4288 .map(|grammar| grammar.text_object_config.as_ref())
4289 .collect::<Vec<_>>();
4290
4291 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4292
4293 iter::from_fn(move || {
4294 loop {
4295 while let Some(capture) = captures.pop() {
4296 if capture.0.overlaps(&range) {
4297 return Some(capture);
4298 }
4299 }
4300
4301 let mat = matches.peek()?;
4302
4303 let Some(config) = configs[mat.grammar_index].as_ref() else {
4304 matches.advance();
4305 continue;
4306 };
4307
4308 for capture in mat.captures {
4309 let Some(ix) = config
4310 .text_objects_by_capture_ix
4311 .binary_search_by_key(&capture.index, |e| e.0)
4312 .ok()
4313 else {
4314 continue;
4315 };
4316 let text_object = config.text_objects_by_capture_ix[ix].1;
4317 let byte_range = capture.node.byte_range();
4318
4319 let mut found = false;
4320 for (range, existing) in captures.iter_mut() {
4321 if existing == &text_object {
4322 range.start = range.start.min(byte_range.start);
4323 range.end = range.end.max(byte_range.end);
4324 found = true;
4325 break;
4326 }
4327 }
4328
4329 if !found {
4330 captures.push((byte_range, text_object));
4331 }
4332 }
4333
4334 matches.advance();
4335 }
4336 })
4337 }
4338
4339 /// Returns enclosing bracket ranges containing the given range
4340 pub fn enclosing_bracket_ranges<T: ToOffset>(
4341 &self,
4342 range: Range<T>,
4343 ) -> impl Iterator<Item = BracketMatch> + '_ {
4344 let range = range.start.to_offset(self)..range.end.to_offset(self);
4345
4346 self.bracket_ranges(range.clone()).filter(move |pair| {
4347 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4348 })
4349 }
4350
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
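    ///
    /// For example (illustrative), in `foo(bar(baz))` with a range inside `baz`,
    /// the parentheses of `bar(…)` form the innermost enclosing pair and are
    /// returned as `(open_range, close_range)`.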
4354 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4355 &self,
4356 range: Range<T>,
4357 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4358 ) -> Option<(Range<usize>, Range<usize>)> {
4359 let range = range.start.to_offset(self)..range.end.to_offset(self);
4360
4361 // Get the ranges of the innermost pair of brackets.
4362 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4363
4364 for pair in self.enclosing_bracket_ranges(range) {
4365 if let Some(range_filter) = range_filter
4366 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4367 {
4368 continue;
4369 }
4370
4371 let len = pair.close_range.end - pair.open_range.start;
4372
4373 if let Some((existing_open, existing_close)) = &result {
4374 let existing_len = existing_close.end - existing_open.start;
4375 if len > existing_len {
4376 continue;
4377 }
4378 }
4379
4380 result = Some((pair.open_range, pair.close_range));
4381 }
4382
4383 result
4384 }
4385
    /// Returns offset ranges for any matches of the redaction query.
4387 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4388 /// will be run on the relevant section of the buffer.
4389 pub fn redacted_ranges<T: ToOffset>(
4390 &self,
4391 range: Range<T>,
4392 ) -> impl Iterator<Item = Range<usize>> + '_ {
4393 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4394 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4395 grammar
4396 .redactions_config
4397 .as_ref()
4398 .map(|config| &config.query)
4399 });
4400
4401 let configs = syntax_matches
4402 .grammars()
4403 .iter()
4404 .map(|grammar| grammar.redactions_config.as_ref())
4405 .collect::<Vec<_>>();
4406
4407 iter::from_fn(move || {
4408 let redacted_range = syntax_matches
4409 .peek()
4410 .and_then(|mat| {
4411 configs[mat.grammar_index].and_then(|config| {
4412 mat.captures
4413 .iter()
4414 .find(|capture| capture.index == config.redaction_capture_ix)
4415 })
4416 })
4417 .map(|mat| mat.node.byte_range());
4418 syntax_matches.advance();
4419 redacted_range
4420 })
4421 }
4422
4423 pub fn injections_intersecting_range<T: ToOffset>(
4424 &self,
4425 range: Range<T>,
4426 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4427 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4428
4429 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4430 grammar
4431 .injection_config
4432 .as_ref()
4433 .map(|config| &config.query)
4434 });
4435
4436 let configs = syntax_matches
4437 .grammars()
4438 .iter()
4439 .map(|grammar| grammar.injection_config.as_ref())
4440 .collect::<Vec<_>>();
4441
4442 iter::from_fn(move || {
4443 let ranges = syntax_matches.peek().and_then(|mat| {
4444 let config = &configs[mat.grammar_index]?;
4445 let content_capture_range = mat.captures.iter().find_map(|capture| {
4446 if capture.index == config.content_capture_ix {
4447 Some(capture.node.byte_range())
4448 } else {
4449 None
4450 }
4451 })?;
4452 let language = self.language_at(content_capture_range.start)?;
4453 Some((content_capture_range, language))
4454 });
4455 syntax_matches.advance();
4456 ranges
4457 })
4458 }
4459
4460 pub fn runnable_ranges(
4461 &self,
4462 offset_range: Range<usize>,
4463 ) -> impl Iterator<Item = RunnableRange> + '_ {
4464 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4465 grammar.runnable_config.as_ref().map(|config| &config.query)
4466 });
4467
4468 let test_configs = syntax_matches
4469 .grammars()
4470 .iter()
4471 .map(|grammar| grammar.runnable_config.as_ref())
4472 .collect::<Vec<_>>();
4473
4474 iter::from_fn(move || {
4475 loop {
4476 let mat = syntax_matches.peek()?;
4477
4478 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4479 let mut run_range = None;
4480 let full_range = mat.captures.iter().fold(
4481 Range {
4482 start: usize::MAX,
4483 end: 0,
4484 },
4485 |mut acc, next| {
4486 let byte_range = next.node.byte_range();
4487 if acc.start > byte_range.start {
4488 acc.start = byte_range.start;
4489 }
4490 if acc.end < byte_range.end {
4491 acc.end = byte_range.end;
4492 }
4493 acc
4494 },
4495 );
4496 if full_range.start > full_range.end {
4497 // We did not find a full spanning range of this match.
4498 return None;
4499 }
4500 let extra_captures: SmallVec<[_; 1]> =
4501 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4502 test_configs
4503 .extra_captures
4504 .get(capture.index as usize)
4505 .cloned()
4506 .and_then(|tag_name| match tag_name {
4507 RunnableCapture::Named(name) => {
4508 Some((capture.node.byte_range(), name))
4509 }
4510 RunnableCapture::Run => {
4511 let _ = run_range.insert(capture.node.byte_range());
4512 None
4513 }
4514 })
4515 }));
4516 let run_range = run_range?;
4517 let tags = test_configs
4518 .query
4519 .property_settings(mat.pattern_index)
4520 .iter()
4521 .filter_map(|property| {
4522 if *property.key == *"tag" {
4523 property
4524 .value
4525 .as_ref()
4526 .map(|value| RunnableTag(value.to_string().into()))
4527 } else {
4528 None
4529 }
4530 })
4531 .collect();
4532 let extra_captures = extra_captures
4533 .into_iter()
4534 .map(|(range, name)| {
4535 (
4536 name.to_string(),
4537 self.text_for_range(range).collect::<String>(),
4538 )
4539 })
4540 .collect();
4541 // All tags should have the same range.
4542 Some(RunnableRange {
4543 run_range,
4544 full_range,
4545 runnable: Runnable {
4546 tags,
4547 language: mat.language,
4548 buffer: self.remote_id(),
4549 },
4550 extra_captures,
4551 buffer_id: self.remote_id(),
4552 })
4553 });
4554
4555 syntax_matches.advance();
4556 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? returns None. We only return from
                    // this iterator once a match contains a run marker; if it doesn't, we loop
                    // around and try the next match.
4559 return test_range;
4560 }
4561 }
4562 })
4563 }
4564
    /// Returns the selections of peers intersecting the given range. The local
    /// replica's selections are included only when `include_local` is true.
4566 #[allow(clippy::type_complexity)]
4567 pub fn selections_in_range(
4568 &self,
4569 range: Range<Anchor>,
4570 include_local: bool,
4571 ) -> impl Iterator<
4572 Item = (
4573 ReplicaId,
4574 bool,
4575 CursorShape,
4576 impl Iterator<Item = &Selection<Anchor>> + '_,
4577 ),
4578 > + '_ {
4579 self.remote_selections
4580 .iter()
4581 .filter(move |(replica_id, set)| {
4582 (include_local || **replica_id != self.text.replica_id())
4583 && !set.selections.is_empty()
4584 })
4585 .map(move |(replica_id, set)| {
4586 let start_ix = match set.selections.binary_search_by(|probe| {
4587 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4588 }) {
4589 Ok(ix) | Err(ix) => ix,
4590 };
4591 let end_ix = match set.selections.binary_search_by(|probe| {
4592 probe.start.cmp(&range.end, self).then(Ordering::Less)
4593 }) {
4594 Ok(ix) | Err(ix) => ix,
4595 };
4596
4597 (
4598 *replica_id,
4599 set.line_mode,
4600 set.cursor_shape,
4601 set.selections[start_ix..end_ix].iter(),
4602 )
4603 })
4604 }
4605
    /// Returns whether the buffer contains any diagnostics.
4607 pub fn has_diagnostics(&self) -> bool {
4608 !self.diagnostics.is_empty()
4609 }
4610
4611 /// Returns all the diagnostics intersecting the given range.
4612 pub fn diagnostics_in_range<'a, T, O>(
4613 &'a self,
4614 search_range: Range<T>,
4615 reversed: bool,
4616 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4617 where
4618 T: 'a + Clone + ToOffset,
4619 O: 'a + FromAnchor,
4620 {
4621 let mut iterators: Vec<_> = self
4622 .diagnostics
4623 .iter()
4624 .map(|(_, collection)| {
4625 collection
4626 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4627 .peekable()
4628 })
4629 .collect();
4630
4631 std::iter::from_fn(move || {
4632 let (next_ix, _) = iterators
4633 .iter_mut()
4634 .enumerate()
4635 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4636 .min_by(|(_, a), (_, b)| {
4637 let cmp = a
4638 .range
4639 .start
4640 .cmp(&b.range.start, self)
4641 // when range is equal, sort by diagnostic severity
4642 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4643 // and stabilize order with group_id
4644 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4645 if reversed { cmp.reverse() } else { cmp }
4646 })?;
4647 iterators[next_ix]
4648 .next()
4649 .map(
4650 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4651 diagnostic,
4652 range: FromAnchor::from_anchor(&range.start, self)
4653 ..FromAnchor::from_anchor(&range.end, self),
4654 },
4655 )
4656 })
4657 }
4658
4659 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4660 /// should be used instead.
4661 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4662 &self.diagnostics
4663 }
4664
4665 /// Returns all the diagnostic groups associated with the given
4666 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4668 pub fn diagnostic_groups(
4669 &self,
4670 language_server_id: Option<LanguageServerId>,
4671 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4672 let mut groups = Vec::new();
4673
4674 if let Some(language_server_id) = language_server_id {
4675 if let Ok(ix) = self
4676 .diagnostics
4677 .binary_search_by_key(&language_server_id, |e| e.0)
4678 {
4679 self.diagnostics[ix]
4680 .1
4681 .groups(language_server_id, &mut groups, self);
4682 }
4683 } else {
4684 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4685 diagnostics.groups(*language_server_id, &mut groups, self);
4686 }
4687 }
4688
4689 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4690 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4691 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4692 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4693 });
4694
4695 groups
4696 }
4697
4698 /// Returns an iterator over the diagnostics for the given group.
4699 pub fn diagnostic_group<O>(
4700 &self,
4701 group_id: usize,
4702 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4703 where
4704 O: FromAnchor + 'static,
4705 {
4706 self.diagnostics
4707 .iter()
4708 .flat_map(move |(_, set)| set.group(group_id, self))
4709 }
4710
4711 /// An integer version number that accounts for all updates besides
4712 /// the buffer's text itself (which is versioned via a version vector).
4713 pub fn non_text_state_update_count(&self) -> usize {
4714 self.non_text_state_update_count
4715 }
4716
4717 /// An integer version that changes when the buffer's syntax changes.
4718 pub fn syntax_update_count(&self) -> usize {
4719 self.syntax.update_count()
4720 }
4721
    /// Returns a snapshot of the underlying file.
4723 pub fn file(&self) -> Option<&Arc<dyn File>> {
4724 self.file.as_ref()
4725 }
4726
4727 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4728 if let Some(file) = self.file() {
4729 if file.path().file_name().is_none() || include_root {
4730 Some(file.full_path(cx).to_string_lossy().into_owned())
4731 } else {
4732 Some(file.path().display(file.path_style(cx)).to_string())
4733 }
4734 } else {
4735 None
4736 }
4737 }
4738
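    /// Collects the distinct words in the offset range described by the given
    /// [`WordsQuery`], mapped to their anchor ranges.
    ///
    /// Illustrative example: for a buffer containing `foo foul 1st ` and a query
    /// with `fuzzy_contents: Some("fo")` and `skip_digits: true`, this yields
    /// `foo` and `foul` but not `1st`.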
4739 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4740 let query_str = query.fuzzy_contents;
4741 if query_str.is_some_and(|query| query.is_empty()) {
4742 return BTreeMap::default();
4743 }
4744
4745 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4746 language,
4747 override_id: None,
4748 }));
4749
4750 let mut query_ix = 0;
4751 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4752 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4753
4754 let mut words = BTreeMap::default();
4755 let mut current_word_start_ix = None;
4756 let mut chunk_ix = query.range.start;
4757 for chunk in self.chunks(query.range, false) {
4758 for (i, c) in chunk.text.char_indices() {
4759 let ix = chunk_ix + i;
4760 if classifier.is_word(c) {
4761 if current_word_start_ix.is_none() {
4762 current_word_start_ix = Some(ix);
4763 }
4764
4765 if let Some(query_chars) = &query_chars
4766 && query_ix < query_len
4767 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4768 {
4769 query_ix += 1;
4770 }
4771 continue;
4772 } else if let Some(word_start) = current_word_start_ix.take()
4773 && query_ix == query_len
4774 {
4775 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4776 let mut word_text = self.text_for_range(word_start..ix).peekable();
4777 let first_char = word_text
4778 .peek()
4779 .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty words and "words" starting with a digit, as a heuristic to reduce useless completions.
4781 if !query.skip_digits
4782 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4783 {
4784 words.insert(word_text.collect(), word_range);
4785 }
4786 }
4787 query_ix = 0;
4788 }
4789 chunk_ix += chunk.text.len();
4790 }
4791
4792 words
4793 }
4794}
4795
4796pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy string, in order.
4798 pub fuzzy_contents: Option<&'a str>,
4799 /// Skips words that start with a digit.
4800 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4802 pub range: Range<usize>,
4803}
4804
4805fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4806 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4807}
4808
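/// Computes the indentation at the start of the given character stream: the
/// length counts every leading space or tab, and the kind is taken from the
/// first indent character. For example, `"\t\tfn"` yields two tabs and
/// `"  fn"` yields two spaces.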
4809fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4810 let mut result = IndentSize::spaces(0);
4811 for c in text {
4812 let kind = match c {
4813 ' ' => IndentKind::Space,
4814 '\t' => IndentKind::Tab,
4815 _ => break,
4816 };
4817 if result.len == 0 {
4818 result.kind = kind;
4819 }
4820 result.len += 1;
4821 }
4822 result
4823}
4824
4825impl Clone for BufferSnapshot {
4826 fn clone(&self) -> Self {
4827 Self {
4828 text: self.text.clone(),
4829 syntax: self.syntax.clone(),
4830 file: self.file.clone(),
4831 remote_selections: self.remote_selections.clone(),
4832 diagnostics: self.diagnostics.clone(),
4833 language: self.language.clone(),
4834 non_text_state_update_count: self.non_text_state_update_count,
4835 }
4836 }
4837}
4838
4839impl Deref for BufferSnapshot {
4840 type Target = text::BufferSnapshot;
4841
4842 fn deref(&self) -> &Self::Target {
4843 &self.text
4844 }
4845}
4846
4847unsafe impl Send for BufferChunks<'_> {}
4848
4849impl<'a> BufferChunks<'a> {
4850 pub(crate) fn new(
4851 text: &'a Rope,
4852 range: Range<usize>,
4853 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4854 diagnostics: bool,
4855 buffer_snapshot: Option<&'a BufferSnapshot>,
4856 ) -> Self {
4857 let mut highlights = None;
4858 if let Some((captures, highlight_maps)) = syntax {
4859 highlights = Some(BufferChunkHighlights {
4860 captures,
4861 next_capture: None,
4862 stack: Default::default(),
4863 highlight_maps,
4864 })
4865 }
4866
4867 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4868 let chunks = text.chunks_in_range(range.clone());
4869
4870 let mut this = BufferChunks {
4871 range,
4872 buffer_snapshot,
4873 chunks,
4874 diagnostic_endpoints,
4875 error_depth: 0,
4876 warning_depth: 0,
4877 information_depth: 0,
4878 hint_depth: 0,
4879 unnecessary_depth: 0,
4880 underline: true,
4881 highlights,
4882 };
4883 this.initialize_diagnostic_endpoints();
4884 this
4885 }
4886
    /// Seeks to the given byte range in the buffer.
4888 pub fn seek(&mut self, range: Range<usize>) {
4889 let old_range = std::mem::replace(&mut self.range, range.clone());
4890 self.chunks.set_range(self.range.clone());
4891 if let Some(highlights) = self.highlights.as_mut() {
4892 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4893 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4894 highlights
4895 .stack
4896 .retain(|(end_offset, _)| *end_offset > range.start);
4897 if let Some(capture) = &highlights.next_capture
4898 && range.start >= capture.node.start_byte()
4899 {
4900 let next_capture_end = capture.node.end_byte();
4901 if range.start < next_capture_end {
4902 highlights.stack.push((
4903 next_capture_end,
4904 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4905 ));
4906 }
4907 highlights.next_capture.take();
4908 }
4909 } else if let Some(snapshot) = self.buffer_snapshot {
4910 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4911 *highlights = BufferChunkHighlights {
4912 captures,
4913 next_capture: None,
4914 stack: Default::default(),
4915 highlight_maps,
4916 };
4917 } else {
4918 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4919 // Seeking such BufferChunks is not supported.
4920 debug_assert!(
4921 false,
4922 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4923 );
4924 }
4925
4926 highlights.captures.set_byte_range(self.range.clone());
4927 self.initialize_diagnostic_endpoints();
4928 }
4929 }
4930
4931 fn initialize_diagnostic_endpoints(&mut self) {
4932 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4933 && let Some(buffer) = self.buffer_snapshot
4934 {
4935 let mut diagnostic_endpoints = Vec::new();
4936 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4937 diagnostic_endpoints.push(DiagnosticEndpoint {
4938 offset: entry.range.start,
4939 is_start: true,
4940 severity: entry.diagnostic.severity,
4941 is_unnecessary: entry.diagnostic.is_unnecessary,
4942 underline: entry.diagnostic.underline,
4943 });
4944 diagnostic_endpoints.push(DiagnosticEndpoint {
4945 offset: entry.range.end,
4946 is_start: false,
4947 severity: entry.diagnostic.severity,
4948 is_unnecessary: entry.diagnostic.is_unnecessary,
4949 underline: entry.diagnostic.underline,
4950 });
4951 }
4952 diagnostic_endpoints
4953 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4954 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4955 self.hint_depth = 0;
4956 self.error_depth = 0;
4957 self.warning_depth = 0;
4958 self.information_depth = 0;
4959 }
4960 }
4961
4962 /// The current byte offset in the buffer.
4963 pub fn offset(&self) -> usize {
4964 self.range.start
4965 }
4966
4967 pub fn range(&self) -> Range<usize> {
4968 self.range.clone()
4969 }
4970
4971 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4972 let depth = match endpoint.severity {
4973 DiagnosticSeverity::ERROR => &mut self.error_depth,
4974 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4975 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4976 DiagnosticSeverity::HINT => &mut self.hint_depth,
4977 _ => return,
4978 };
4979 if endpoint.is_start {
4980 *depth += 1;
4981 } else {
4982 *depth -= 1;
4983 }
4984
4985 if endpoint.is_unnecessary {
4986 if endpoint.is_start {
4987 self.unnecessary_depth += 1;
4988 } else {
4989 self.unnecessary_depth -= 1;
4990 }
4991 }
4992 }
4993
4994 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4995 if self.error_depth > 0 {
4996 Some(DiagnosticSeverity::ERROR)
4997 } else if self.warning_depth > 0 {
4998 Some(DiagnosticSeverity::WARNING)
4999 } else if self.information_depth > 0 {
5000 Some(DiagnosticSeverity::INFORMATION)
5001 } else if self.hint_depth > 0 {
5002 Some(DiagnosticSeverity::HINT)
5003 } else {
5004 None
5005 }
5006 }
5007
5008 fn current_code_is_unnecessary(&self) -> bool {
5009 self.unnecessary_depth > 0
5010 }
5011}
5012
5013impl<'a> Iterator for BufferChunks<'a> {
5014 type Item = Chunk<'a>;
5015
5016 fn next(&mut self) -> Option<Self::Item> {
5017 let mut next_capture_start = usize::MAX;
5018 let mut next_diagnostic_endpoint = usize::MAX;
5019
5020 if let Some(highlights) = self.highlights.as_mut() {
5021 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5022 if *parent_capture_end <= self.range.start {
5023 highlights.stack.pop();
5024 } else {
5025 break;
5026 }
5027 }
5028
5029 if highlights.next_capture.is_none() {
5030 highlights.next_capture = highlights.captures.next();
5031 }
5032
5033 while let Some(capture) = highlights.next_capture.as_ref() {
5034 if self.range.start < capture.node.start_byte() {
5035 next_capture_start = capture.node.start_byte();
5036 break;
5037 } else {
5038 let highlight_id =
5039 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5040 highlights
5041 .stack
5042 .push((capture.node.end_byte(), highlight_id));
5043 highlights.next_capture = highlights.captures.next();
5044 }
5045 }
5046 }
5047
5048 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5049 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5050 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5051 if endpoint.offset <= self.range.start {
5052 self.update_diagnostic_depths(endpoint);
5053 diagnostic_endpoints.next();
5054 self.underline = endpoint.underline;
5055 } else {
5056 next_diagnostic_endpoint = endpoint.offset;
5057 break;
5058 }
5059 }
5060 }
5061 self.diagnostic_endpoints = diagnostic_endpoints;
5062
5063 if let Some(ChunkBitmaps {
5064 text: chunk,
5065 chars: chars_map,
5066 tabs,
5067 }) = self.chunks.peek_with_bitmaps()
5068 {
5069 let chunk_start = self.range.start;
5070 let mut chunk_end = (self.chunks.offset() + chunk.len())
5071 .min(next_capture_start)
5072 .min(next_diagnostic_endpoint);
5073 let mut highlight_id = None;
5074 if let Some(highlights) = self.highlights.as_ref()
5075 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5076 {
5077 chunk_end = chunk_end.min(*parent_capture_end);
5078 highlight_id = Some(*parent_highlight_id);
5079 }
5080 let bit_start = chunk_start - self.chunks.offset();
5081 let bit_end = chunk_end - self.chunks.offset();
5082
5083 let slice = &chunk[bit_start..bit_end];
5084
5085 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5086 let tabs = (tabs >> bit_start) & mask;
5087 let chars = (chars_map >> bit_start) & mask;
5088
5089 self.range.start = chunk_end;
5090 if self.range.start == self.chunks.offset() + chunk.len() {
5091 self.chunks.next().unwrap();
5092 }
5093
5094 Some(Chunk {
5095 text: slice,
5096 syntax_highlight_id: highlight_id,
5097 underline: self.underline,
5098 diagnostic_severity: self.current_diagnostic_severity(),
5099 is_unnecessary: self.current_code_is_unnecessary(),
5100 tabs,
5101 chars,
5102 ..Chunk::default()
5103 })
5104 } else {
5105 None
5106 }
5107 }
5108}
5109
5110impl operation_queue::Operation for Operation {
5111 fn lamport_timestamp(&self) -> clock::Lamport {
5112 match self {
5113 Operation::Buffer(_) => {
5114 unreachable!("buffer operations should never be deferred at this layer")
5115 }
5116 Operation::UpdateDiagnostics {
5117 lamport_timestamp, ..
5118 }
5119 | Operation::UpdateSelections {
5120 lamport_timestamp, ..
5121 }
5122 | Operation::UpdateCompletionTriggers {
5123 lamport_timestamp, ..
5124 }
5125 | Operation::UpdateLineEnding {
5126 lamport_timestamp, ..
5127 } => *lamport_timestamp,
5128 }
5129 }
5130}
5131
5132impl Default for Diagnostic {
5133 fn default() -> Self {
5134 Self {
5135 source: Default::default(),
5136 source_kind: DiagnosticSourceKind::Other,
5137 code: None,
5138 code_description: None,
5139 severity: DiagnosticSeverity::ERROR,
5140 message: Default::default(),
5141 markdown: None,
5142 group_id: 0,
5143 is_primary: false,
5144 is_disk_based: false,
5145 is_unnecessary: false,
5146 underline: true,
5147 data: None,
5148 }
5149 }
5150}
5151
5152impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5154 pub fn spaces(len: u32) -> Self {
5155 Self {
5156 len,
5157 kind: IndentKind::Space,
5158 }
5159 }
5160
5161 /// Returns an [`IndentSize`] representing a tab.
5162 pub fn tab() -> Self {
5163 Self {
5164 len: 1,
5165 kind: IndentKind::Tab,
5166 }
5167 }
5168
5169 /// An iterator over the characters represented by this [`IndentSize`].
5170 pub fn chars(&self) -> impl Iterator<Item = char> {
5171 iter::repeat(self.char()).take(self.len as usize)
5172 }
5173
5174 /// The character representation of this [`IndentSize`].
5175 pub fn char(&self) -> char {
5176 match self.kind {
5177 IndentKind::Space => ' ',
5178 IndentKind::Tab => '\t',
5179 }
5180 }
5181
    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size in the given direction.
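    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2))`
    /// yields an indent of two spaces, while
    /// `IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab())` yields a tab.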
5184 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5185 match direction {
5186 Ordering::Less => {
5187 if self.kind == size.kind && self.len >= size.len {
5188 self.len -= size.len;
5189 }
5190 }
5191 Ordering::Equal => {}
5192 Ordering::Greater => {
5193 if self.len == 0 {
5194 self = size;
5195 } else if self.kind == size.kind {
5196 self.len += size.len;
5197 }
5198 }
5199 }
5200 self
5201 }
5202
5203 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5204 match self.kind {
5205 IndentKind::Space => self.len as usize,
5206 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5207 }
5208 }
5209}
5210
5211#[cfg(any(test, feature = "test-support"))]
5212pub struct TestFile {
5213 pub path: Arc<RelPath>,
5214 pub root_name: String,
5215 pub local_root: Option<PathBuf>,
5216}
5217
5218#[cfg(any(test, feature = "test-support"))]
5219impl File for TestFile {
5220 fn path(&self) -> &Arc<RelPath> {
5221 &self.path
5222 }
5223
5224 fn full_path(&self, _: &gpui::App) -> PathBuf {
5225 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5226 }
5227
5228 fn as_local(&self) -> Option<&dyn LocalFile> {
5229 if self.local_root.is_some() {
5230 Some(self)
5231 } else {
5232 None
5233 }
5234 }
5235
5236 fn disk_state(&self) -> DiskState {
5237 unimplemented!()
5238 }
5239
5240 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5241 self.path().file_name().unwrap_or(self.root_name.as_ref())
5242 }
5243
5244 fn worktree_id(&self, _: &App) -> WorktreeId {
5245 WorktreeId::from_usize(0)
5246 }
5247
5248 fn to_proto(&self, _: &App) -> rpc::proto::File {
5249 unimplemented!()
5250 }
5251
5252 fn is_private(&self) -> bool {
5253 false
5254 }
5255
5256 fn path_style(&self, _cx: &App) -> PathStyle {
5257 PathStyle::local()
5258 }
5259}
5260
5261#[cfg(any(test, feature = "test-support"))]
5262impl LocalFile for TestFile {
5263 fn abs_path(&self, _cx: &App) -> PathBuf {
5264 PathBuf::from(self.local_root.as_ref().unwrap())
5265 .join(&self.root_name)
5266 .join(self.path.as_std_path())
5267 }
5268
5269 fn load(&self, _cx: &App, _options: EncodingOptions) -> Task<Result<(Encoding, String)>> {
5270 unimplemented!()
5271 }
5272
5273 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5274 unimplemented!()
5275 }
5276}
5277
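/// Coalesces an iterator of `u32` values into contiguous ranges, each
/// containing at most `max_len` values. For example, `[1, 2, 3, 5, 6, 9]`
/// with a `max_len` of 2 yields `1..3`, `3..4`, `5..7`, and `9..10`.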
5278pub(crate) fn contiguous_ranges(
5279 values: impl Iterator<Item = u32>,
5280 max_len: usize,
5281) -> impl Iterator<Item = Range<u32>> {
5282 let mut values = values;
5283 let mut current_range: Option<Range<u32>> = None;
5284 std::iter::from_fn(move || {
5285 loop {
5286 if let Some(value) = values.next() {
5287 if let Some(range) = &mut current_range
5288 && value == range.end
5289 && range.len() < max_len
5290 {
5291 range.end += 1;
5292 continue;
5293 }
5294
5295 let prev_range = current_range.clone();
5296 current_range = Some(value..(value + 1));
5297 if prev_range.is_some() {
5298 return prev_range;
5299 }
5300 } else {
5301 return current_range.take();
5302 }
5303 }
5304 })
5305}
5306
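/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-specific word characters (and the surrounding
/// scope context) into account.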
5307#[derive(Default, Debug)]
5308pub struct CharClassifier {
5309 scope: Option<LanguageScope>,
5310 scope_context: Option<CharScopeContext>,
5311 ignore_punctuation: bool,
5312}
5313
5314impl CharClassifier {
5315 pub fn new(scope: Option<LanguageScope>) -> Self {
5316 Self {
5317 scope,
5318 scope_context: None,
5319 ignore_punctuation: false,
5320 }
5321 }
5322
5323 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5324 Self {
5325 scope_context,
5326 ..self
5327 }
5328 }
5329
5330 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5331 Self {
5332 ignore_punctuation,
5333 ..self
5334 }
5335 }
5336
5337 pub fn is_whitespace(&self, c: char) -> bool {
5338 self.kind(c) == CharKind::Whitespace
5339 }
5340
5341 pub fn is_word(&self, c: char) -> bool {
5342 self.kind(c) == CharKind::Word
5343 }
5344
5345 pub fn is_punctuation(&self, c: char) -> bool {
5346 self.kind(c) == CharKind::Punctuation
5347 }
5348
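    /// Classifies a single character. Alphanumeric characters and `_` are
    /// always [`CharKind::Word`]; any extra word characters defined by the
    /// language scope (for the current scope context) also count as words;
    /// whitespace is [`CharKind::Whitespace`]; everything else is
    /// [`CharKind::Punctuation`], unless `ignore_punctuation` is true, in
    /// which case it is treated as a word character.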
5349 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5350 if c.is_alphanumeric() || c == '_' {
5351 return CharKind::Word;
5352 }
5353
5354 if let Some(scope) = &self.scope {
5355 let characters = match self.scope_context {
5356 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5357 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5358 None => scope.word_characters(),
5359 };
5360 if let Some(characters) = characters
5361 && characters.contains(&c)
5362 {
5363 return CharKind::Word;
5364 }
5365 }
5366
5367 if c.is_whitespace() {
5368 return CharKind::Whitespace;
5369 }
5370
5371 if ignore_punctuation {
5372 CharKind::Word
5373 } else {
5374 CharKind::Punctuation
5375 }
5376 }
5377
5378 pub fn kind(&self, c: char) -> CharKind {
5379 self.kind_with(c, self.ignore_punctuation)
5380 }
5381}
5382
5383/// Find all of the ranges of whitespace that occur at the ends of lines
5384/// in the given rope.
5385///
5386/// This could also be done with a regex search, but this implementation
5387/// avoids copying text.
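///
/// For example, for the text `"foo  \nbar\t\nbaz"`, the returned ranges are
/// `3..5` (the spaces after `foo`) and `9..10` (the tab after `bar`).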
5388pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5389 let mut ranges = Vec::new();
5390
5391 let mut offset = 0;
5392 let mut prev_chunk_trailing_whitespace_range = 0..0;
5393 for chunk in rope.chunks() {
5394 let mut prev_line_trailing_whitespace_range = 0..0;
5395 for (i, line) in chunk.split('\n').enumerate() {
5396 let line_end_offset = offset + line.len();
5397 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5398 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5399
5400 if i == 0 && trimmed_line_len == 0 {
5401 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5402 }
5403 if !prev_line_trailing_whitespace_range.is_empty() {
5404 ranges.push(prev_line_trailing_whitespace_range);
5405 }
5406
5407 offset = line_end_offset + 1;
5408 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5409 }
5410
5411 offset -= 1;
5412 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5413 }
5414
5415 if !prev_chunk_trailing_whitespace_range.is_empty() {
5416 ranges.push(prev_chunk_trailing_whitespace_range);
5417 }
5418
5419 ranges
5420}