1use crate::{
2 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result};
21use clock::Lamport;
22pub use clock::ReplicaId;
23use collections::HashMap;
24use encodings::{Encoding, EncodingOptions};
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle,
29 SharedString, StyledText, Task, TaskLabel, TextStyle,
30};
31
32use lsp::{LanguageServerId, NumberOrString};
33use parking_lot::Mutex;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::PathBuf,
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
79/// Indicate whether a [`Buffer`] has permissions to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 pub encoding: Arc<Encoding>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190impl From<settings::CursorShape> for CursorShape {
191 fn from(shape: settings::CursorShape) -> Self {
192 match shape {
193 settings::CursorShape::Bar => CursorShape::Bar,
194 settings::CursorShape::Block => CursorShape::Block,
195 settings::CursorShape::Underline => CursorShape::Underline,
196 settings::CursorShape::Hollow => CursorShape::Hollow,
197 }
198 }
199}
200
201#[derive(Clone, Debug)]
202struct SelectionSet {
203 line_mode: bool,
204 cursor_shape: CursorShape,
205 selections: Arc<[Selection<Anchor>]>,
206 lamport_timestamp: clock::Lamport,
207}
208
209/// A diagnostic associated with a certain range of a buffer.
210#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
211pub struct Diagnostic {
212 /// The name of the service that produced this diagnostic.
213 pub source: Option<String>,
214 /// A machine-readable code that identifies this diagnostic.
215 pub code: Option<NumberOrString>,
216 pub code_description: Option<lsp::Uri>,
217 /// Whether this diagnostic is a hint, warning, or error.
218 pub severity: DiagnosticSeverity,
219 /// The human-readable message associated with this diagnostic.
220 pub message: String,
221 /// The human-readable message (in markdown format)
222 pub markdown: Option<String>,
223 /// An id that identifies the group to which this diagnostic belongs.
224 ///
225 /// When a language server produces a diagnostic with
226 /// one or more associated diagnostics, those diagnostics are all
227 /// assigned a single group ID.
228 pub group_id: usize,
229 /// Whether this diagnostic is the primary diagnostic for its group.
230 ///
231 /// In a given group, the primary diagnostic is the top-level diagnostic
232 /// returned by the language server. The non-primary diagnostics are the
233 /// associated diagnostics.
234 pub is_primary: bool,
235 /// Whether this diagnostic is considered to originate from an analysis of
236 /// files on disk, as opposed to any unsaved buffer contents. This is a
237 /// property of a given diagnostic source, and is configured for a given
238 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
239 /// for the language server.
240 pub is_disk_based: bool,
241 /// Whether this diagnostic marks unnecessary code.
242 pub is_unnecessary: bool,
    /// A quick way to distinguish diagnostic groups by their source.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
246 pub data: Option<Value>,
247 /// Whether to underline the corresponding text range in the editor.
248 pub underline: bool,
249}
250
251#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
252pub enum DiagnosticSourceKind {
253 Pulled,
254 Pushed,
255 Other,
256}
257
258/// An operation used to synchronize this buffer with its other replicas.
259#[derive(Clone, Debug, PartialEq)]
260pub enum Operation {
261 /// A text operation.
262 Buffer(text::Operation),
263
264 /// An update to the buffer's diagnostics.
265 UpdateDiagnostics {
266 /// The id of the language server that produced the new diagnostics.
267 server_id: LanguageServerId,
268 /// The diagnostics.
269 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
270 /// The buffer's lamport timestamp.
271 lamport_timestamp: clock::Lamport,
272 },
273
274 /// An update to the most recent selections in this buffer.
275 UpdateSelections {
276 /// The selections.
277 selections: Arc<[Selection<Anchor>]>,
278 /// The buffer's lamport timestamp.
279 lamport_timestamp: clock::Lamport,
280 /// Whether the selections are in 'line mode'.
281 line_mode: bool,
282 /// The [`CursorShape`] associated with these selections.
283 cursor_shape: CursorShape,
284 },
285
286 /// An update to the characters that should trigger autocompletion
287 /// for this buffer.
288 UpdateCompletionTriggers {
289 /// The characters that trigger autocompletion.
290 triggers: Vec<String>,
291 /// The buffer's lamport timestamp.
292 lamport_timestamp: clock::Lamport,
293 /// The language server ID.
294 server_id: LanguageServerId,
295 },
296
297 /// An update to the line ending type of this buffer.
298 UpdateLineEnding {
299 /// The line ending type.
300 line_ending: LineEnding,
301 /// The buffer's lamport timestamp.
302 lamport_timestamp: clock::Lamport,
303 },
304}
305
306/// An event that occurs in a buffer.
307#[derive(Clone, Debug, PartialEq)]
308pub enum BufferEvent {
309 /// The buffer was changed in a way that must be
310 /// propagated to its other replicas.
311 Operation {
312 operation: Operation,
313 is_local: bool,
314 },
315 /// The buffer was edited.
316 Edited,
317 /// The buffer's `dirty` bit changed.
318 DirtyChanged,
319 /// The buffer was saved.
320 Saved,
321 /// The buffer's file was changed on disk.
322 FileHandleChanged,
323 /// The buffer was reloaded.
324 Reloaded,
325 /// The buffer is in need of a reload
326 ReloadNeeded,
327 /// The buffer's language was changed.
328 LanguageChanged,
329 /// The buffer's syntax trees were updated.
330 Reparsed,
331 /// The buffer's diagnostics were updated.
332 DiagnosticsUpdated,
333 /// The buffer gained or lost editing capabilities.
334 CapabilityChanged,
335}
336
337/// The file associated with a buffer.
338pub trait File: Send + Sync + Any {
339 /// Returns the [`LocalFile`] associated with this file, if the
340 /// file is local.
341 fn as_local(&self) -> Option<&dyn LocalFile>;
342
343 /// Returns whether this file is local.
344 fn is_local(&self) -> bool {
345 self.as_local().is_some()
346 }
347
348 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
349 /// only available in some states, such as modification time.
350 fn disk_state(&self) -> DiskState;
351
352 /// Returns the path of this file relative to the worktree's root directory.
353 fn path(&self) -> &Arc<RelPath>;
354
355 /// Returns the path of this file relative to the worktree's parent directory (this means it
356 /// includes the name of the worktree's root folder).
357 fn full_path(&self, cx: &App) -> PathBuf;
358
359 /// Returns the path style of this file.
360 fn path_style(&self, cx: &App) -> PathStyle;
361
362 /// Returns the last component of this handle's absolute path. If this handle refers to the root
363 /// of its worktree, then this method will return the name of the worktree itself.
364 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
365
366 /// Returns the id of the worktree to which this file belongs.
367 ///
368 /// This is needed for looking up project-specific settings.
369 fn worktree_id(&self, cx: &App) -> WorktreeId;
370
371 /// Converts this file into a protobuf message.
372 fn to_proto(&self, cx: &App) -> rpc::proto::File;
373
374 /// Return whether Zed considers this to be a private file.
375 fn is_private(&self) -> bool;
376
    /// Returns the character encoding of this file, if known.
    fn encoding(&self) -> Option<Arc<Encoding>> {
378 unimplemented!()
379 }
380}
381
382/// The file's storage status - whether it's stored (`Present`), and if so when it was last
383/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
384/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
385/// indicator for new files.
386#[derive(Copy, Clone, Debug, PartialEq)]
387pub enum DiskState {
388 /// File created in Zed that has not been saved.
389 New,
390 /// File present on the filesystem.
391 Present { mtime: MTime },
392 /// Deleted file that was previously present.
393 Deleted,
394}
395
396impl DiskState {
397 /// Returns the file's last known modification time on disk.
398 pub fn mtime(self) -> Option<MTime> {
399 match self {
400 DiskState::New => None,
401 DiskState::Present { mtime } => Some(mtime),
402 DiskState::Deleted => None,
403 }
404 }
405
406 pub fn exists(&self) -> bool {
407 match self {
408 DiskState::New => false,
409 DiskState::Present { .. } => true,
410 DiskState::Deleted => false,
411 }
412 }
413}
414
415/// The file associated with a buffer, in the case where the file is on the local disk.
416pub trait LocalFile: File {
417 /// Returns the absolute path of this file
418 fn abs_path(&self, cx: &App) -> PathBuf;
419
420 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
421 fn load(
422 &self,
423 cx: &App,
424 options: &EncodingOptions,
425 buffer_encoding: Option<Arc<Encoding>>,
426 ) -> Task<Result<String>>;
427
428 /// Loads the file's contents from disk.
429 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
430}
431
432/// The auto-indent behavior associated with an editing operation.
433/// For some editing operations, each affected line of text has its
434/// indentation recomputed. For other operations, the entire block
435/// of edited text is adjusted uniformly.
436#[derive(Clone, Debug)]
437pub enum AutoindentMode {
438 /// Indent each line of inserted text.
439 EachLine,
440 /// Apply the same indentation adjustment to all of the lines
441 /// in a given insertion.
442 Block {
443 /// The original indentation column of the first line of each
444 /// insertion, if it has been copied.
445 ///
446 /// Knowing this makes it possible to preserve the relative indentation
447 /// of every line in the insertion from when it was copied.
448 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other
        /// line of the insertion has its indentation shifted by `b - a` columns.
452 original_indent_columns: Vec<Option<u32>>,
453 },
454}
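// Worked example (illustrative only, not part of the source): suppose a block
// was copied whose first line sat at column 4 (`a = 4`) and whose second line
// sat at column 8. If auto-indent places the first line at column 8 (`b = 8`),
// the entire insertion is shifted by `b - a = 4` columns, so the second line
// lands at column 12, preserving the relative indentation it had when copied.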
455
456#[derive(Clone)]
457struct AutoindentRequest {
458 before_edit: BufferSnapshot,
459 entries: Vec<AutoindentRequestEntry>,
460 is_block_mode: bool,
461 ignore_empty_lines: bool,
462}
463
464#[derive(Debug, Clone)]
465struct AutoindentRequestEntry {
466 /// A range of the buffer whose indentation should be adjusted.
467 range: Range<Anchor>,
468 /// Whether or not these lines should be considered brand new, for the
469 /// purpose of auto-indent. When text is not new, its indentation will
470 /// only be adjusted if the suggested indentation level has *changed*
471 /// since the edit was made.
472 first_line_is_new: bool,
473 indent_size: IndentSize,
474 original_indent_column: Option<u32>,
475}
476
477#[derive(Debug)]
478struct IndentSuggestion {
479 basis_row: u32,
480 delta: Ordering,
481 within_error: bool,
482}
483
484struct BufferChunkHighlights<'a> {
485 captures: SyntaxMapCaptures<'a>,
486 next_capture: Option<SyntaxMapCapture<'a>>,
487 stack: Vec<(usize, HighlightId)>,
488 highlight_maps: Vec<HighlightMap>,
489}
490
491/// An iterator that yields chunks of a buffer's text, along with their
492/// syntax highlights and diagnostic status.
493pub struct BufferChunks<'a> {
494 buffer_snapshot: Option<&'a BufferSnapshot>,
495 range: Range<usize>,
496 chunks: text::Chunks<'a>,
497 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
498 error_depth: usize,
499 warning_depth: usize,
500 information_depth: usize,
501 hint_depth: usize,
502 unnecessary_depth: usize,
503 underline: bool,
504 highlights: Option<BufferChunkHighlights<'a>>,
505}
506
507/// A chunk of a buffer's text, along with its syntax highlight and
508/// diagnostic status.
509#[derive(Clone, Debug, Default)]
510pub struct Chunk<'a> {
511 /// The text of the chunk.
512 pub text: &'a str,
513 /// The syntax highlighting style of the chunk.
514 pub syntax_highlight_id: Option<HighlightId>,
515 /// The highlight style that has been applied to this chunk in
516 /// the editor.
517 pub highlight_style: Option<HighlightStyle>,
518 /// The severity of diagnostic associated with this chunk, if any.
519 pub diagnostic_severity: Option<DiagnosticSeverity>,
520 /// A bitset of which characters are tabs in this string.
521 pub tabs: u128,
522 /// Bitmap of character indices in this chunk
523 pub chars: u128,
524 /// Whether this chunk of text is marked as unnecessary.
525 pub is_unnecessary: bool,
526 /// Whether this chunk of text was originally a tab character.
527 pub is_tab: bool,
528 /// Whether this chunk of text was originally an inlay.
529 pub is_inlay: bool,
530 /// Whether to underline the corresponding text range in the editor.
531 pub underline: bool,
532}
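// A minimal sketch (assuming bit `i` of `tabs` corresponds to the `i`-th byte
// of `text`, which is an assumption rather than something documented here) of
// how the tab bitset above could be consumed:
//
//     fn tab_positions(chunk: &Chunk) -> Vec<usize> {
//         (0..chunk.text.len().min(128))
//             .filter(|&i| chunk.tabs & (1 << i) != 0)
//             .collect()
//     }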
533
534/// A set of edits to a given version of a buffer, computed asynchronously.
535#[derive(Debug)]
536pub struct Diff {
537 pub base_version: clock::Global,
538 pub line_ending: LineEnding,
539 pub edits: Vec<(Range<usize>, Arc<str>)>,
540}
541
542#[derive(Debug, Clone, Copy)]
543pub(crate) struct DiagnosticEndpoint {
544 offset: usize,
545 is_start: bool,
546 underline: bool,
547 severity: DiagnosticSeverity,
548 is_unnecessary: bool,
549}
550
551/// A class of characters, used for characterizing a run of text.
552#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
553pub enum CharKind {
554 /// Whitespace.
555 Whitespace,
556 /// Punctuation.
557 Punctuation,
558 /// Word.
559 Word,
560}
561
562/// Context for character classification within a specific scope.
563#[derive(Copy, Clone, Eq, PartialEq, Debug)]
564pub enum CharScopeContext {
565 /// Character classification for completion queries.
566 ///
567 /// This context treats certain characters as word constituents that would
568 /// normally be considered punctuation, such as '-' in Tailwind classes
569 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
570 Completion,
571 /// Character classification for linked edits.
572 ///
573 /// This context handles characters that should be treated as part of
574 /// identifiers during linked editing operations, such as '.' in JSX
575 /// component names like `<Animated.View>`.
576 LinkedEdit,
577}
578
579/// A runnable is a set of data about a region that could be resolved into a task
580pub struct Runnable {
581 pub tags: SmallVec<[RunnableTag; 1]>,
582 pub language: Arc<Language>,
583 pub buffer: BufferId,
584}
585
586#[derive(Default, Clone, Debug)]
587pub struct HighlightedText {
588 pub text: SharedString,
589 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
590}
591
592#[derive(Default, Debug)]
593struct HighlightedTextBuilder {
594 pub text: String,
595 highlights: Vec<(Range<usize>, HighlightStyle)>,
596}
597
598impl HighlightedText {
599 pub fn from_buffer_range<T: ToOffset>(
600 range: Range<T>,
601 snapshot: &text::BufferSnapshot,
602 syntax_snapshot: &SyntaxSnapshot,
603 override_style: Option<HighlightStyle>,
604 syntax_theme: &SyntaxTheme,
605 ) -> Self {
606 let mut highlighted_text = HighlightedTextBuilder::default();
607 highlighted_text.add_text_from_buffer_range(
608 range,
609 snapshot,
610 syntax_snapshot,
611 override_style,
612 syntax_theme,
613 );
614 highlighted_text.build()
615 }
616
617 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
618 gpui::StyledText::new(self.text.clone())
619 .with_default_highlights(default_style, self.highlights.iter().cloned())
620 }
621
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight begins within it, along with a boolean indicating whether
    /// more lines follow.
624 pub fn first_line_preview(self) -> (Self, bool) {
625 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
626 let first_line = &self.text[..newline_ix];
627
628 // Trim leading whitespace, unless an edit starts prior to it.
629 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
630 if let Some((first_highlight_range, _)) = self.highlights.first() {
631 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
632 }
633
634 let preview_text = &first_line[preview_start_ix..];
635 let preview_highlights = self
636 .highlights
637 .into_iter()
638 .skip_while(|(range, _)| range.end <= preview_start_ix)
639 .take_while(|(range, _)| range.start < newline_ix)
640 .filter_map(|(mut range, highlight)| {
641 range.start = range.start.saturating_sub(preview_start_ix);
642 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
643 if range.is_empty() {
644 None
645 } else {
646 Some((range, highlight))
647 }
648 });
649
650 let preview = Self {
651 text: SharedString::new(preview_text),
652 highlights: preview_highlights.collect(),
653 };
654
655 (preview, self.text.len() > newline_ix)
656 }
657}
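// Hypothetical usage sketch for `first_line_preview` (caller code, not part of
// this crate): take only the first line of a multi-line `HighlightedText` for
// display in a compact UI element, noting whether further lines were truncated.
//
//     let (preview, has_more_lines) = highlighted_text.first_line_preview();
//     // `preview.text` holds the (possibly whitespace-trimmed) first line, and
//     // `preview.highlights` has been re-based onto that substring.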
658
659impl HighlightedTextBuilder {
660 pub fn build(self) -> HighlightedText {
661 HighlightedText {
662 text: self.text.into(),
663 highlights: self.highlights,
664 }
665 }
666
667 pub fn add_text_from_buffer_range<T: ToOffset>(
668 &mut self,
669 range: Range<T>,
670 snapshot: &text::BufferSnapshot,
671 syntax_snapshot: &SyntaxSnapshot,
672 override_style: Option<HighlightStyle>,
673 syntax_theme: &SyntaxTheme,
674 ) {
675 let range = range.to_offset(snapshot);
676 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
677 let start = self.text.len();
678 self.text.push_str(chunk.text);
679 let end = self.text.len();
680
681 if let Some(highlight_style) = chunk
682 .syntax_highlight_id
683 .and_then(|id| id.style(syntax_theme))
684 {
685 let highlight_style = override_style.map_or(highlight_style, |override_style| {
686 highlight_style.highlight(override_style)
687 });
688 self.highlights.push((start..end, highlight_style));
689 } else if let Some(override_style) = override_style {
690 self.highlights.push((start..end, override_style));
691 }
692 }
693 }
694
695 fn highlighted_chunks<'a>(
696 range: Range<usize>,
697 snapshot: &'a text::BufferSnapshot,
698 syntax_snapshot: &'a SyntaxSnapshot,
699 ) -> BufferChunks<'a> {
700 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
701 grammar
702 .highlights_config
703 .as_ref()
704 .map(|config| &config.query)
705 });
706
707 let highlight_maps = captures
708 .grammars()
709 .iter()
710 .map(|grammar| grammar.highlight_map())
711 .collect();
712
713 BufferChunks::new(
714 snapshot.as_rope(),
715 range,
716 Some((captures, highlight_maps)),
717 false,
718 None,
719 )
720 }
721}
722
723#[derive(Clone)]
724pub struct EditPreview {
725 old_snapshot: text::BufferSnapshot,
726 applied_edits_snapshot: text::BufferSnapshot,
727 syntax_snapshot: SyntaxSnapshot,
728}
729
730impl EditPreview {
731 pub fn highlight_edits(
732 &self,
733 current_snapshot: &BufferSnapshot,
734 edits: &[(Range<Anchor>, String)],
735 include_deletions: bool,
736 cx: &App,
737 ) -> HighlightedText {
738 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
739 return HighlightedText::default();
740 };
741
742 let mut highlighted_text = HighlightedTextBuilder::default();
743
744 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
745
746 let insertion_highlight_style = HighlightStyle {
747 background_color: Some(cx.theme().status().created_background),
748 ..Default::default()
749 };
750 let deletion_highlight_style = HighlightStyle {
751 background_color: Some(cx.theme().status().deleted_background),
752 ..Default::default()
753 };
754 let syntax_theme = cx.theme().syntax();
755
756 for (range, edit_text) in edits {
757 let edit_new_end_in_preview_snapshot = range
758 .end
759 .bias_right(&self.old_snapshot)
760 .to_offset(&self.applied_edits_snapshot);
761 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
762
763 let unchanged_range_in_preview_snapshot =
764 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
765 if !unchanged_range_in_preview_snapshot.is_empty() {
766 highlighted_text.add_text_from_buffer_range(
767 unchanged_range_in_preview_snapshot,
768 &self.applied_edits_snapshot,
769 &self.syntax_snapshot,
770 None,
771 syntax_theme,
772 );
773 }
774
775 let range_in_current_snapshot = range.to_offset(current_snapshot);
776 if include_deletions && !range_in_current_snapshot.is_empty() {
777 highlighted_text.add_text_from_buffer_range(
778 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
781 Some(deletion_highlight_style),
782 syntax_theme,
783 );
784 }
785
786 if !edit_text.is_empty() {
787 highlighted_text.add_text_from_buffer_range(
788 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
789 &self.applied_edits_snapshot,
790 &self.syntax_snapshot,
791 Some(insertion_highlight_style),
792 syntax_theme,
793 );
794 }
795
796 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
797 }
798
799 highlighted_text.add_text_from_buffer_range(
800 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
801 &self.applied_edits_snapshot,
802 &self.syntax_snapshot,
803 None,
804 syntax_theme,
805 );
806
807 highlighted_text.build()
808 }
809
810 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
811 let (first, _) = edits.first()?;
812 let (last, _) = edits.last()?;
813
814 let start = first
815 .start
816 .bias_left(&self.old_snapshot)
817 .to_point(&self.applied_edits_snapshot);
818 let end = last
819 .end
820 .bias_right(&self.old_snapshot)
821 .to_point(&self.applied_edits_snapshot);
822
823 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
824 let range = Point::new(start.row, 0)
825 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
826
827 Some(range.to_offset(&self.applied_edits_snapshot))
828 }
829}
830
831#[derive(Clone, Debug, PartialEq, Eq)]
832pub struct BracketMatch {
833 pub open_range: Range<usize>,
834 pub close_range: Range<usize>,
835 pub newline_only: bool,
836}
837
838impl Buffer {
839 /// Create a new buffer with the given base text.
840 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
841 Self::build(
842 TextBuffer::new(
843 ReplicaId::LOCAL,
844 cx.entity_id().as_non_zero_u64().into(),
845 base_text.into(),
846 &cx.background_executor(),
847 ),
848 None,
849 Capability::ReadWrite,
850 )
851 }
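    // Usage sketch (hypothetical caller code): construct a local buffer inside
    // an entity builder, mirroring how `cx.new` is used for branch buffers
    // later in this file.
    //
    //     let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));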
852
    /// Replaces the underlying text buffer. Unlike `set_text`, this does not
    /// change the buffer's editing state.
855 pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
856 self.text = new;
857 self.saved_version = self.version.clone();
858 self.has_unsaved_edits.set((self.version.clone(), false));
859
860 self.was_changed();
861 cx.emit(BufferEvent::DirtyChanged);
862 cx.notify();
863 }
864
865 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
866 pub fn local_normalized(
867 base_text_normalized: Rope,
868 line_ending: LineEnding,
869 cx: &Context<Self>,
870 ) -> Self {
871 Self::build(
872 TextBuffer::new_normalized(
873 ReplicaId::LOCAL,
874 cx.entity_id().as_non_zero_u64().into(),
875 line_ending,
876 base_text_normalized,
877 ),
878 None,
879 Capability::ReadWrite,
880 )
881 }
882
883 /// Create a new buffer that is a replica of a remote buffer.
884 pub fn remote(
885 remote_id: BufferId,
886 replica_id: ReplicaId,
887 capability: Capability,
888 base_text: impl Into<String>,
889 cx: &BackgroundExecutor,
890 ) -> Self {
891 Self::build(
892 TextBuffer::new(replica_id, remote_id, base_text.into(), cx),
893 None,
894 capability,
895 )
896 }
897
898 /// Create a new buffer that is a replica of a remote buffer, populating its
899 /// state from the given protobuf message.
900 pub fn from_proto(
901 replica_id: ReplicaId,
902 capability: Capability,
903 message: proto::BufferState,
904 file: Option<Arc<dyn File>>,
905 cx: &BackgroundExecutor,
906 ) -> Result<Self> {
907 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
908 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx);
909 let mut this = Self::build(buffer, file, capability);
910 this.text.set_line_ending(proto::deserialize_line_ending(
911 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
912 ));
913 this.saved_version = proto::deserialize_version(&message.saved_version);
914 this.saved_mtime = message.saved_mtime.map(|time| time.into());
915 Ok(this)
916 }
917
918 /// Serialize the buffer's state to a protobuf message.
919 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
920 proto::BufferState {
921 id: self.remote_id().into(),
922 file: self.file.as_ref().map(|f| f.to_proto(cx)),
923 base_text: self.base_text().to_string(),
924 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
925 saved_version: proto::serialize_version(&self.saved_version),
926 saved_mtime: self.saved_mtime.map(|time| time.into()),
927 }
928 }
929
930 /// Serialize as protobufs all of the changes to the buffer since the given version.
931 pub fn serialize_ops(
932 &self,
933 since: Option<clock::Global>,
934 cx: &App,
935 ) -> Task<Vec<proto::Operation>> {
936 let mut operations = Vec::new();
937 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
938
939 operations.extend(self.remote_selections.iter().map(|(_, set)| {
940 proto::serialize_operation(&Operation::UpdateSelections {
941 selections: set.selections.clone(),
942 lamport_timestamp: set.lamport_timestamp,
943 line_mode: set.line_mode,
944 cursor_shape: set.cursor_shape,
945 })
946 }));
947
948 for (server_id, diagnostics) in &self.diagnostics {
949 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
950 lamport_timestamp: self.diagnostics_timestamp,
951 server_id: *server_id,
952 diagnostics: diagnostics.iter().cloned().collect(),
953 }));
954 }
955
956 for (server_id, completions) in &self.completion_triggers_per_language_server {
957 operations.push(proto::serialize_operation(
958 &Operation::UpdateCompletionTriggers {
959 triggers: completions.iter().cloned().collect(),
960 lamport_timestamp: self.completion_triggers_timestamp,
961 server_id: *server_id,
962 },
963 ));
964 }
965
966 let text_operations = self.text.operations().clone();
967 cx.background_spawn(async move {
968 let since = since.unwrap_or_default();
969 operations.extend(
970 text_operations
971 .iter()
972 .filter(|(_, op)| !since.observed(op.timestamp()))
973 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
974 );
975 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
976 operations
977 })
978 }
979
980 /// Assign a language to the buffer, returning the buffer.
981 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
982 self.set_language(Some(language), cx);
983 self
984 }
985
986 /// Returns the [`Capability`] of this buffer.
987 pub fn capability(&self) -> Capability {
988 self.capability
989 }
990
991 /// Whether this buffer can only be read.
992 pub fn read_only(&self) -> bool {
993 self.capability == Capability::ReadOnly
994 }
995
996 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
997 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
998 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
999 let snapshot = buffer.snapshot();
1000 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1001 Self {
1002 saved_mtime,
1003 saved_version: buffer.version(),
1004 preview_version: buffer.version(),
1005 reload_task: None,
1006 transaction_depth: 0,
1007 was_dirty_before_starting_transaction: None,
1008 has_unsaved_edits: Cell::new((buffer.version(), false)),
1009 text: buffer,
1010 branch_state: None,
1011 file,
1012 capability,
1013 syntax_map,
1014 reparse: None,
1015 non_text_state_update_count: 0,
1016 sync_parse_timeout: Duration::from_millis(1),
1017 parse_status: watch::channel(ParseStatus::Idle),
1018 autoindent_requests: Default::default(),
1019 wait_for_autoindent_txs: Default::default(),
1020 pending_autoindent: Default::default(),
1021 language: None,
1022 remote_selections: Default::default(),
1023 diagnostics: Default::default(),
1024 diagnostics_timestamp: Lamport::MIN,
1025 completion_triggers: Default::default(),
1026 completion_triggers_per_language_server: Default::default(),
1027 completion_triggers_timestamp: Lamport::MIN,
1028 deferred_ops: OperationQueue::new(),
1029 has_conflict: false,
1030 change_bits: Default::default(),
1031 _subscriptions: Vec::new(),
1032 encoding: Arc::new(Encoding::new(encodings::UTF_8)),
1033 }
1034 }
1035
1036 pub fn build_snapshot(
1037 text: Rope,
1038 language: Option<Arc<Language>>,
1039 language_registry: Option<Arc<LanguageRegistry>>,
1040 cx: &mut App,
1041 ) -> impl Future<Output = BufferSnapshot> + use<> {
1042 let entity_id = cx.reserve_entity::<Self>().entity_id();
1043 let buffer_id = entity_id.as_non_zero_u64().into();
1044 async move {
1045 let text =
1046 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1047 .snapshot();
1048 let mut syntax = SyntaxMap::new(&text).snapshot();
1049 if let Some(language) = language.clone() {
1050 let language_registry = language_registry.clone();
1051 syntax.reparse(&text, language_registry, language);
1052 }
1053 BufferSnapshot {
1054 text,
1055 syntax,
1056 file: None,
1057 diagnostics: Default::default(),
1058 remote_selections: Default::default(),
1059 language,
1060 non_text_state_update_count: 0,
1061 }
1062 }
1063 }
1064
1065 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1066 let entity_id = cx.reserve_entity::<Self>().entity_id();
1067 let buffer_id = entity_id.as_non_zero_u64().into();
1068 let text = TextBuffer::new_normalized(
1069 ReplicaId::LOCAL,
1070 buffer_id,
1071 Default::default(),
1072 Rope::new(),
1073 )
1074 .snapshot();
1075 let syntax = SyntaxMap::new(&text).snapshot();
1076 BufferSnapshot {
1077 text,
1078 syntax,
1079 file: None,
1080 diagnostics: Default::default(),
1081 remote_selections: Default::default(),
1082 language: None,
1083 non_text_state_update_count: 0,
1084 }
1085 }
1086
1087 #[cfg(any(test, feature = "test-support"))]
1088 pub fn build_snapshot_sync(
1089 text: Rope,
1090 language: Option<Arc<Language>>,
1091 language_registry: Option<Arc<LanguageRegistry>>,
1092 cx: &mut App,
1093 ) -> BufferSnapshot {
1094 let entity_id = cx.reserve_entity::<Self>().entity_id();
1095 let buffer_id = entity_id.as_non_zero_u64().into();
1096 let text =
1097 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1098 .snapshot();
1099 let mut syntax = SyntaxMap::new(&text).snapshot();
1100 if let Some(language) = language.clone() {
1101 syntax.reparse(&text, language_registry, language);
1102 }
1103 BufferSnapshot {
1104 text,
1105 syntax,
1106 file: None,
1107 diagnostics: Default::default(),
1108 remote_selections: Default::default(),
1109 language,
1110 non_text_state_update_count: 0,
1111 }
1112 }
1113
1114 /// Retrieve a snapshot of the buffer's current state. This is computationally
1115 /// cheap, and allows reading from the buffer on a background thread.
1116 pub fn snapshot(&self) -> BufferSnapshot {
1117 let text = self.text.snapshot();
1118 let mut syntax_map = self.syntax_map.lock();
1119 syntax_map.interpolate(&text);
1120 let syntax = syntax_map.snapshot();
1121
1122 BufferSnapshot {
1123 text,
1124 syntax,
1125 file: self.file.clone(),
1126 remote_selections: self.remote_selections.clone(),
1127 diagnostics: self.diagnostics.clone(),
1128 language: self.language.clone(),
1129 non_text_state_update_count: self.non_text_state_update_count,
1130 }
1131 }
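    // A sketch (hypothetical caller code) of the pattern described above:
    // capture a cheap snapshot on the main thread, then read from it on a
    // background thread.
    //
    //     let snapshot = buffer.read(cx).snapshot();
    //     cx.background_spawn(async move {
    //         let last_row = snapshot.max_point().row;
    //         // ... perform further read-only queries against `snapshot` ...
    //     })
    //     .detach();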
1132
1133 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1134 let this = cx.entity();
1135 cx.new(|cx| {
1136 let mut branch = Self {
1137 branch_state: Some(BufferBranchState {
1138 base_buffer: this.clone(),
1139 merged_operations: Default::default(),
1140 }),
1141 language: self.language.clone(),
1142 has_conflict: self.has_conflict,
1143 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1144 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1145 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1146 };
1147 if let Some(language_registry) = self.language_registry() {
1148 branch.set_language_registry(language_registry);
1149 }
1150
1151 // Reparse the branch buffer so that we get syntax highlighting immediately.
1152 branch.reparse(cx);
1153
1154 branch
1155 })
1156 }
1157
1158 pub fn preview_edits(
1159 &self,
1160 edits: Arc<[(Range<Anchor>, String)]>,
1161 cx: &App,
1162 ) -> Task<EditPreview> {
1163 let registry = self.language_registry();
1164 let language = self.language().cloned();
1165 let old_snapshot = self.text.snapshot();
1166 let mut branch_buffer = self.text.branch();
1167 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1168 let executor = cx.background_executor().clone();
1169 cx.background_spawn(async move {
1170 if !edits.is_empty() {
1171 if let Some(language) = language.clone() {
1172 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1173 }
1174
1175 branch_buffer.edit(edits.iter().cloned(), &executor);
1176 let snapshot = branch_buffer.snapshot();
1177 syntax_snapshot.interpolate(&snapshot);
1178
1179 if let Some(language) = language {
1180 syntax_snapshot.reparse(&snapshot, registry, language);
1181 }
1182 }
1183 EditPreview {
1184 old_snapshot,
1185 applied_edits_snapshot: branch_buffer.snapshot(),
1186 syntax_snapshot,
1187 }
1188 })
1189 }
1190
1191 /// Applies all of the changes in this buffer that intersect any of the
1192 /// given `ranges` to its base buffer.
1193 ///
1194 /// If `ranges` is empty, then all changes will be applied. This buffer must
1195 /// be a branch buffer to call this method.
1196 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1197 let Some(base_buffer) = self.base_buffer() else {
1198 debug_panic!("not a branch buffer");
1199 return;
1200 };
1201
1202 let mut ranges = if ranges.is_empty() {
1203 &[0..usize::MAX]
1204 } else {
1205 ranges.as_slice()
1206 }
1207 .iter()
1208 .peekable();
1209
1210 let mut edits = Vec::new();
1211 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1212 let mut is_included = false;
1213 while let Some(range) = ranges.peek() {
1214 if range.end < edit.new.start {
1215 ranges.next().unwrap();
1216 } else {
1217 if range.start <= edit.new.end {
1218 is_included = true;
1219 }
1220 break;
1221 }
1222 }
1223
1224 if is_included {
1225 edits.push((
1226 edit.old.clone(),
1227 self.text_for_range(edit.new.clone()).collect::<String>(),
1228 ));
1229 }
1230 }
1231
1232 let operation = base_buffer.update(cx, |base_buffer, cx| {
1233 // cx.emit(BufferEvent::DiffBaseChanged);
1234 base_buffer.edit(edits, None, cx)
1235 });
1236
1237 if let Some(operation) = operation
1238 && let Some(BufferBranchState {
1239 merged_operations, ..
1240 }) = &mut self.branch_state
1241 {
1242 merged_operations.push(operation);
1243 }
1244 }
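    // A hedged sketch of the branch workflow these methods enable (`branch`
    // creates the branch entity, `merge_into_base` folds its edits back into
    // the base buffer); caller code, not part of this crate:
    //
    //     let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    //     branch.update(cx, |branch, cx| {
    //         branch.edit([(0..0, "// prototype change\n")], None, cx);
    //         branch.merge_into_base(Vec::new(), cx); // empty ranges => merge everything
    //     });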
1245
1246 fn on_base_buffer_event(
1247 &mut self,
1248 _: Entity<Buffer>,
1249 event: &BufferEvent,
1250 cx: &mut Context<Self>,
1251 ) {
1252 let BufferEvent::Operation { operation, .. } = event else {
1253 return;
1254 };
1255 let Some(BufferBranchState {
1256 merged_operations, ..
1257 }) = &mut self.branch_state
1258 else {
1259 return;
1260 };
1261
1262 let mut operation_to_undo = None;
1263 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1264 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1265 {
1266 merged_operations.remove(ix);
1267 operation_to_undo = Some(operation.timestamp);
1268 }
1269
1270 self.apply_ops([operation.clone()], cx);
1271
1272 if let Some(timestamp) = operation_to_undo {
1273 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1274 self.undo_operations(counts, cx);
1275 }
1276 }
1277
1278 #[cfg(test)]
1279 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1280 &self.text
1281 }
1282
1283 /// Retrieve a snapshot of the buffer's raw text, without any
1284 /// language-related state like the syntax tree or diagnostics.
1285 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1286 self.text.snapshot()
1287 }
1288
1289 /// The file associated with the buffer, if any.
1290 pub fn file(&self) -> Option<&Arc<dyn File>> {
1291 self.file.as_ref()
1292 }
1293
1294 /// The version of the buffer that was last saved or reloaded from disk.
1295 pub fn saved_version(&self) -> &clock::Global {
1296 &self.saved_version
1297 }
1298
1299 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1300 pub fn saved_mtime(&self) -> Option<MTime> {
1301 self.saved_mtime
1302 }
1303
1304 /// Assign a language to the buffer.
1305 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1306 self.non_text_state_update_count += 1;
1307 self.syntax_map.lock().clear(&self.text);
1308 self.language = language;
1309 self.was_changed();
1310 self.reparse(cx);
1311 cx.emit(BufferEvent::LanguageChanged);
1312 }
1313
1314 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1315 /// other languages if parts of the buffer are written in different languages.
1316 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1317 self.syntax_map
1318 .lock()
1319 .set_language_registry(language_registry);
1320 }
1321
1322 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1323 self.syntax_map.lock().language_registry()
1324 }
1325
1326 /// Assign the line ending type to the buffer.
1327 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1328 self.text.set_line_ending(line_ending);
1329
1330 let lamport_timestamp = self.text.lamport_clock.tick();
1331 self.send_operation(
1332 Operation::UpdateLineEnding {
1333 line_ending,
1334 lamport_timestamp,
1335 },
1336 true,
1337 cx,
1338 );
1339 }
1340
1341 /// Assign the buffer a new [`Capability`].
1342 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1343 if self.capability != capability {
1344 self.capability = capability;
1345 cx.emit(BufferEvent::CapabilityChanged)
1346 }
1347 }
1348
1349 /// This method is called to signal that the buffer has been saved.
1350 pub fn did_save(
1351 &mut self,
1352 version: clock::Global,
1353 mtime: Option<MTime>,
1354 cx: &mut Context<Self>,
1355 ) {
1356 self.saved_version = version.clone();
1357 self.has_unsaved_edits.set((version, false));
1358 self.has_conflict = false;
1359 self.saved_mtime = mtime;
1360 self.was_changed();
1361 cx.emit(BufferEvent::Saved);
1362 cx.notify();
1363 }
1364
1365 /// Reloads the contents of the buffer from disk.
1366 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1367 let (tx, rx) = futures::channel::oneshot::channel();
1368 let encoding = (*self.encoding).clone();
1369
1370 let buffer_encoding = self.encoding.clone();
1371 let options = EncodingOptions::default();
1372 options.encoding.set(encoding.get());
1373
1374 let prev_version = self.text.version();
1375 self.reload_task = Some(cx.spawn(async move |this, cx| {
1376 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1377 let file = this.file.as_ref()?.as_local()?;
1378
1379 Some((file.disk_state().mtime(), {
1380 file.load(cx, &options, Some(buffer_encoding))
1381 }))
1382 })?
1383 else {
1384 return Ok(());
1385 };
1386
1387 let new_text = new_text.await?;
1388 let diff = this
1389 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1390 .await;
1391 this.update(cx, |this, cx| {
1392 if this.version() == diff.base_version {
1393 this.finalize_last_transaction();
1394 this.apply_diff(diff, cx);
1395 tx.send(this.finalize_last_transaction().cloned()).ok();
1396 this.has_conflict = false;
1397 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1398 } else {
1399 if !diff.edits.is_empty()
1400 || this
1401 .edits_since::<usize>(&diff.base_version)
1402 .next()
1403 .is_some()
1404 {
1405 this.has_conflict = true;
1406 }
1407
1408 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1409 }
1410
1411 this.reload_task.take();
1412 })
1413 }));
1414 rx
1415 }
1416
1417 /// This method is called to signal that the buffer has been reloaded.
1418 pub fn did_reload(
1419 &mut self,
1420 version: clock::Global,
1421 line_ending: LineEnding,
1422 mtime: Option<MTime>,
1423 cx: &mut Context<Self>,
1424 ) {
1425 self.saved_version = version;
1426 self.has_unsaved_edits
1427 .set((self.saved_version.clone(), false));
1428 self.text.set_line_ending(line_ending);
1429 self.saved_mtime = mtime;
1430 cx.emit(BufferEvent::Reloaded);
1431 cx.notify();
1432 }
1433
    /// Replaces the [`File`] backing this buffer without notifying observers.
    pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
        self.file = Some(new_file);
    }

1437 /// Updates the [`File`] backing this buffer. This should be called when
1438 /// the file has changed or has been deleted.
1439 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1440 let was_dirty = self.is_dirty();
1441 let mut file_changed = false;
1442
1443 if let Some(old_file) = self.file.as_ref() {
1444 if new_file.path() != old_file.path() {
1445 file_changed = true;
1446 }
1447
1448 let old_state = old_file.disk_state();
1449 let new_state = new_file.disk_state();
1450 if old_state != new_state {
1451 file_changed = true;
1452 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1453 cx.emit(BufferEvent::ReloadNeeded)
1454 }
1455 }
1456 } else {
1457 file_changed = true;
1458 };
1459
1460 self.file = Some(new_file);
1461 if file_changed {
1462 self.was_changed();
1463 self.non_text_state_update_count += 1;
1464 if was_dirty != self.is_dirty() {
1465 cx.emit(BufferEvent::DirtyChanged);
1466 }
1467 cx.emit(BufferEvent::FileHandleChanged);
1468 cx.notify();
1469 }
1470 }
1471
1472 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1473 Some(self.branch_state.as_ref()?.base_buffer.clone())
1474 }
1475
1476 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1477 pub fn language(&self) -> Option<&Arc<Language>> {
1478 self.language.as_ref()
1479 }
1480
1481 /// Returns the [`Language`] at the given location.
1482 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1483 let offset = position.to_offset(self);
1484 let mut is_first = true;
1485 let start_anchor = self.anchor_before(offset);
1486 let end_anchor = self.anchor_after(offset);
1487 self.syntax_map
1488 .lock()
1489 .layers_for_range(offset..offset, &self.text, false)
1490 .filter(|layer| {
1491 if is_first {
1492 is_first = false;
1493 return true;
1494 }
1495
1496 layer
1497 .included_sub_ranges
1498 .map(|sub_ranges| {
1499 sub_ranges.iter().any(|sub_range| {
1500 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1501 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1502 !is_before_start && !is_after_end
1503 })
1504 })
1505 .unwrap_or(true)
1506 })
1507 .last()
1508 .map(|info| info.language.clone())
1509 .or_else(|| self.language.clone())
1510 }
1511
1512 /// Returns each [`Language`] for the active syntax layers at the given location.
1513 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1514 let offset = position.to_offset(self);
1515 let mut languages: Vec<Arc<Language>> = self
1516 .syntax_map
1517 .lock()
1518 .layers_for_range(offset..offset, &self.text, false)
1519 .map(|info| info.language.clone())
1520 .collect();
1521
1522 if languages.is_empty()
1523 && let Some(buffer_language) = self.language()
1524 {
1525 languages.push(buffer_language.clone());
1526 }
1527
1528 languages
1529 }
1530
1531 /// An integer version number that accounts for all updates besides
1532 /// the buffer's text itself (which is versioned via a version vector).
1533 pub fn non_text_state_update_count(&self) -> usize {
1534 self.non_text_state_update_count
1535 }
1536
1537 /// Whether the buffer is being parsed in the background.
1538 #[cfg(any(test, feature = "test-support"))]
1539 pub fn is_parsing(&self) -> bool {
1540 self.reparse.is_some()
1541 }
1542
1543 /// Indicates whether the buffer contains any regions that may be
1544 /// written in a language that hasn't been loaded yet.
1545 pub fn contains_unknown_injections(&self) -> bool {
1546 self.syntax_map.lock().contains_unknown_injections()
1547 }
1548
1549 #[cfg(any(test, feature = "test-support"))]
1550 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1551 self.sync_parse_timeout = timeout;
1552 }
1553
1554 /// Called after an edit to synchronize the buffer's main parse tree with
1555 /// the buffer's new underlying state.
1556 ///
1557 /// Locks the syntax map and interpolates the edits since the last reparse
1558 /// into the foreground syntax tree.
1559 ///
1560 /// Then takes a stable snapshot of the syntax map before unlocking it.
1561 /// The snapshot with the interpolated edits is sent to a background thread,
1562 /// where we ask Tree-sitter to perform an incremental parse.
1563 ///
    /// Meanwhile, in the foreground, we block the main thread for up to
    /// `sync_parse_timeout` (1ms by default) waiting for the parse to complete.
    /// If it completes within that window, we proceed synchronously.
    ///
    /// If we time out waiting on the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for the
    /// parse to complete. When the background parse finishes, it calls back
    /// into the main thread and assigns the result to the foreground parse state.
1572 ///
1573 /// If the buffer or grammar changed since the start of the background parse,
1574 /// initiate an additional reparse recursively. To avoid concurrent parses
1575 /// for the same buffer, we only initiate a new parse if we are not already
1576 /// parsing in the background.
1577 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1578 if self.reparse.is_some() {
1579 return;
1580 }
1581 let language = if let Some(language) = self.language.clone() {
1582 language
1583 } else {
1584 return;
1585 };
1586
1587 let text = self.text_snapshot();
1588 let parsed_version = self.version();
1589
1590 let mut syntax_map = self.syntax_map.lock();
1591 syntax_map.interpolate(&text);
1592 let language_registry = syntax_map.language_registry();
1593 let mut syntax_snapshot = syntax_map.snapshot();
1594 drop(syntax_map);
1595
1596 let parse_task = cx.background_spawn({
1597 let language = language.clone();
1598 let language_registry = language_registry.clone();
1599 async move {
1600 syntax_snapshot.reparse(&text, language_registry, language);
1601 syntax_snapshot
1602 }
1603 });
1604
1605 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1606 match cx
1607 .background_executor()
1608 .block_with_timeout(self.sync_parse_timeout, parse_task)
1609 {
1610 Ok(new_syntax_snapshot) => {
1611 self.did_finish_parsing(new_syntax_snapshot, cx);
1612 self.reparse = None;
1613 }
1614 Err(parse_task) => {
1615 // todo(lw): hot foreground spawn
1616 self.reparse = Some(cx.spawn(async move |this, cx| {
1617 let new_syntax_map = cx.background_spawn(parse_task).await;
1618 this.update(cx, move |this, cx| {
1619 let grammar_changed = || {
1620 this.language.as_ref().is_none_or(|current_language| {
1621 !Arc::ptr_eq(&language, current_language)
1622 })
1623 };
1624 let language_registry_changed = || {
1625 new_syntax_map.contains_unknown_injections()
1626 && language_registry.is_some_and(|registry| {
1627 registry.version() != new_syntax_map.language_registry_version()
1628 })
1629 };
1630 let parse_again = this.version.changed_since(&parsed_version)
1631 || language_registry_changed()
1632 || grammar_changed();
1633 this.did_finish_parsing(new_syntax_map, cx);
1634 this.reparse = None;
1635 if parse_again {
1636 this.reparse(cx);
1637 }
1638 })
1639 .ok();
1640 }));
1641 }
1642 }
1643 }
1644
1645 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1646 self.was_changed();
1647 self.non_text_state_update_count += 1;
1648 self.syntax_map.lock().did_parse(syntax_snapshot);
1649 self.request_autoindent(cx);
1650 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1651 cx.emit(BufferEvent::Reparsed);
1652 cx.notify();
1653 }
1654
1655 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1656 self.parse_status.1.clone()
1657 }
1658
1659 /// Assign to the buffer a set of diagnostics created by a given language server.
1660 pub fn update_diagnostics(
1661 &mut self,
1662 server_id: LanguageServerId,
1663 diagnostics: DiagnosticSet,
1664 cx: &mut Context<Self>,
1665 ) {
1666 let lamport_timestamp = self.text.lamport_clock.tick();
1667 let op = Operation::UpdateDiagnostics {
1668 server_id,
1669 diagnostics: diagnostics.iter().cloned().collect(),
1670 lamport_timestamp,
1671 };
1672
1673 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1674 self.send_operation(op, true, cx);
1675 }
1676
1677 pub fn buffer_diagnostics(
1678 &self,
1679 for_server: Option<LanguageServerId>,
1680 ) -> Vec<&DiagnosticEntry<Anchor>> {
1681 match for_server {
1682 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1683 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1684 Err(_) => Vec::new(),
1685 },
1686 None => self
1687 .diagnostics
1688 .iter()
1689 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1690 .collect(),
1691 }
1692 }
1693
1694 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1695 if let Some(indent_sizes) = self.compute_autoindents() {
1696 let indent_sizes = cx.background_spawn(indent_sizes);
1697 match cx
1698 .background_executor()
1699 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1700 {
1701 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1702 Err(indent_sizes) => {
1703 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1704 let indent_sizes = indent_sizes.await;
1705 this.update(cx, |this, cx| {
1706 this.apply_autoindents(indent_sizes, cx);
1707 })
1708 .ok();
1709 }));
1710 }
1711 }
1712 } else {
1713 self.autoindent_requests.clear();
1714 for tx in self.wait_for_autoindent_txs.drain(..) {
1715 tx.send(()).ok();
1716 }
1717 }
1718 }
1719
1720 fn compute_autoindents(
1721 &self,
1722 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1723 let max_rows_between_yields = 100;
1724 let snapshot = self.snapshot();
1725 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1726 return None;
1727 }
1728
1729 let autoindent_requests = self.autoindent_requests.clone();
1730 Some(async move {
1731 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1732 for request in autoindent_requests {
1733 // Resolve each edited range to its row in the current buffer and in the
1734 // buffer before this batch of edits.
1735 let mut row_ranges = Vec::new();
1736 let mut old_to_new_rows = BTreeMap::new();
1737 let mut language_indent_sizes_by_new_row = Vec::new();
1738 for entry in &request.entries {
1739 let position = entry.range.start;
1740 let new_row = position.to_point(&snapshot).row;
1741 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1742 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1743
1744 if !entry.first_line_is_new {
1745 let old_row = position.to_point(&request.before_edit).row;
1746 old_to_new_rows.insert(old_row, new_row);
1747 }
1748 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1749 }
1750
1751 // Build a map containing the suggested indentation for each of the edited lines
1752 // with respect to the state of the buffer before these edits. This map is keyed
1753 // by the rows for these lines in the current state of the buffer.
1754 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1755 let old_edited_ranges =
1756 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1757 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1758 let mut language_indent_size = IndentSize::default();
1759 for old_edited_range in old_edited_ranges {
1760 let suggestions = request
1761 .before_edit
1762 .suggest_autoindents(old_edited_range.clone())
1763 .into_iter()
1764 .flatten();
1765 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1766 if let Some(suggestion) = suggestion {
1767 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1768
1769 // Find the indent size based on the language for this row.
1770 while let Some((row, size)) = language_indent_sizes.peek() {
1771 if *row > new_row {
1772 break;
1773 }
1774 language_indent_size = *size;
1775 language_indent_sizes.next();
1776 }
1777
1778 let suggested_indent = old_to_new_rows
1779 .get(&suggestion.basis_row)
1780 .and_then(|from_row| {
1781 Some(old_suggestions.get(from_row).copied()?.0)
1782 })
1783 .unwrap_or_else(|| {
1784 request
1785 .before_edit
1786 .indent_size_for_line(suggestion.basis_row)
1787 })
1788 .with_delta(suggestion.delta, language_indent_size);
1789 old_suggestions
1790 .insert(new_row, (suggested_indent, suggestion.within_error));
1791 }
1792 }
1793 yield_now().await;
1794 }
1795
1796 // Compute new suggestions for each line, but only include them in the result
1797 // if they differ from the old suggestion for that line.
1798 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1799 let mut language_indent_size = IndentSize::default();
1800 for (row_range, original_indent_column) in row_ranges {
1801 let new_edited_row_range = if request.is_block_mode {
1802 row_range.start..row_range.start + 1
1803 } else {
1804 row_range.clone()
1805 };
1806
1807 let suggestions = snapshot
1808 .suggest_autoindents(new_edited_row_range.clone())
1809 .into_iter()
1810 .flatten();
1811 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1812 if let Some(suggestion) = suggestion {
1813 // Find the indent size based on the language for this row.
1814 while let Some((row, size)) = language_indent_sizes.peek() {
1815 if *row > new_row {
1816 break;
1817 }
1818 language_indent_size = *size;
1819 language_indent_sizes.next();
1820 }
1821
1822 let suggested_indent = indent_sizes
1823 .get(&suggestion.basis_row)
1824 .copied()
1825 .map(|e| e.0)
1826 .unwrap_or_else(|| {
1827 snapshot.indent_size_for_line(suggestion.basis_row)
1828 })
1829 .with_delta(suggestion.delta, language_indent_size);
1830
1831 if old_suggestions.get(&new_row).is_none_or(
1832 |(old_indentation, was_within_error)| {
1833 suggested_indent != *old_indentation
1834 && (!suggestion.within_error || *was_within_error)
1835 },
1836 ) {
1837 indent_sizes.insert(
1838 new_row,
1839 (suggested_indent, request.ignore_empty_lines),
1840 );
1841 }
1842 }
1843 }
1844
1845 if let (true, Some(original_indent_column)) =
1846 (request.is_block_mode, original_indent_column)
1847 {
1848 let new_indent =
1849 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1850 *indent
1851 } else {
1852 snapshot.indent_size_for_line(row_range.start)
1853 };
1854 let delta = new_indent.len as i64 - original_indent_column as i64;
1855 if delta != 0 {
1856 for row in row_range.skip(1) {
1857 indent_sizes.entry(row).or_insert_with(|| {
1858 let mut size = snapshot.indent_size_for_line(row);
1859 if size.kind == new_indent.kind {
1860 match delta.cmp(&0) {
1861 Ordering::Greater => size.len += delta as u32,
1862 Ordering::Less => {
1863 size.len = size.len.saturating_sub(-delta as u32)
1864 }
1865 Ordering::Equal => {}
1866 }
1867 }
1868 (size, request.ignore_empty_lines)
1869 });
1870 }
1871 }
1872 }
1873
1874 yield_now().await;
1875 }
1876 }
1877
1878 indent_sizes
1879 .into_iter()
1880 .filter_map(|(row, (indent, ignore_empty_lines))| {
1881 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1882 None
1883 } else {
1884 Some((row, indent))
1885 }
1886 })
1887 .collect()
1888 })
1889 }
1890
1891 fn apply_autoindents(
1892 &mut self,
1893 indent_sizes: BTreeMap<u32, IndentSize>,
1894 cx: &mut Context<Self>,
1895 ) {
1896 self.autoindent_requests.clear();
1897 for tx in self.wait_for_autoindent_txs.drain(..) {
1898 tx.send(()).ok();
1899 }
1900
1901 let edits: Vec<_> = indent_sizes
1902 .into_iter()
1903 .filter_map(|(row, indent_size)| {
1904 let current_size = indent_size_for_line(self, row);
1905 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1906 })
1907 .collect();
1908
1909 let preserve_preview = self.preserve_preview();
1910 self.edit(edits, None, cx);
1911 if preserve_preview {
1912 self.refresh_preview();
1913 }
1914 }
1915
1916 /// Create a minimal edit that will cause the given row to be indented
1917 /// with the given size. After applying this edit, the length of the line
1918 /// will always be at least `new_size.len`.
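///
/// A minimal illustrative sketch, widening a row that is currently indented
/// with two spaces to four:
///
/// ```ignore
/// // Widening row 3 from 2 spaces to 4 spaces produces an insertion of two
/// // spaces at the start of that line.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```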
1919 pub fn edit_for_indent_size_adjustment(
1920 row: u32,
1921 current_size: IndentSize,
1922 new_size: IndentSize,
1923 ) -> Option<(Range<Point>, String)> {
1924 if new_size.kind == current_size.kind {
1925 match new_size.len.cmp(&current_size.len) {
1926 Ordering::Greater => {
1927 let point = Point::new(row, 0);
1928 Some((
1929 point..point,
1930 iter::repeat(new_size.char())
1931 .take((new_size.len - current_size.len) as usize)
1932 .collect::<String>(),
1933 ))
1934 }
1935
1936 Ordering::Less => Some((
1937 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1938 String::new(),
1939 )),
1940
1941 Ordering::Equal => None,
1942 }
1943 } else {
1944 Some((
1945 Point::new(row, 0)..Point::new(row, current_size.len),
1946 iter::repeat(new_size.char())
1947 .take(new_size.len as usize)
1948 .collect::<String>(),
1949 ))
1950 }
1951 }
1952
1953 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1954 /// and the given new text.
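///
/// A hedged usage sketch from inside a `Buffer` method; the spawn/update
/// plumbing mirrors how this buffer schedules its own background work:
///
/// ```ignore
/// // Compute the diff on the background executor, then apply it once it
/// // completes. If the buffer changed in the meantime, `apply_diff`
/// // reconciles or discards conflicting hunks.
/// let diff = self.diff(new_text, cx);
/// cx.spawn(async move |this, cx| {
///     let diff = diff.await;
///     this.update(cx, |this, cx| this.apply_diff(diff, cx)).ok();
/// })
/// .detach();
/// ```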
1955 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1956 let old_text = self.as_rope().clone();
1957 let base_version = self.version();
1958 cx.background_executor()
1959 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1960 let old_text = old_text.to_string();
1961 let line_ending = LineEnding::detect(&new_text);
1962 LineEnding::normalize(&mut new_text);
1963 let edits = text_diff(&old_text, &new_text);
1964 Diff {
1965 base_version,
1966 line_ending,
1967 edits,
1968 }
1969 })
1970 }
1971
1972 /// Spawns a background task that searches the buffer for any whitespace
1973 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1974 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1975 let old_text = self.as_rope().clone();
1976 let line_ending = self.line_ending();
1977 let base_version = self.version();
1978 cx.background_spawn(async move {
1979 let ranges = trailing_whitespace_ranges(&old_text);
1980 let empty = Arc::<str>::from("");
1981 Diff {
1982 base_version,
1983 line_ending,
1984 edits: ranges
1985 .into_iter()
1986 .map(|range| (range, empty.clone()))
1987 .collect(),
1988 }
1989 })
1990 }
1991
1992 /// Ensures that the buffer ends with a single newline character, and
1993 /// no other whitespace. Skips if the buffer is empty.
1994 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1995 let len = self.len();
1996 if len == 0 {
1997 return;
1998 }
1999 let mut offset = len;
2000 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2001 let non_whitespace_len = chunk
2002 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2003 .len();
2004 offset -= chunk.len();
2005 offset += non_whitespace_len;
2006 if non_whitespace_len != 0 {
2007 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2008 return;
2009 }
2010 break;
2011 }
2012 }
2013 self.edit([(offset..len, "\n")], None, cx);
2014 }
2015
2016 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2017 /// calculated, the diff is adjusted to account for those changes, and any parts of
2018 /// the diff that conflict with those changes are discarded.
2019 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2020 let snapshot = self.snapshot();
2021 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2022 let mut delta = 0;
2023 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2024 while let Some(edit_since) = edits_since.peek() {
2025 // If the edit occurs after a diff hunk, then it does not
2026 // affect that hunk.
2027 if edit_since.old.start > range.end {
2028 break;
2029 }
2030 // If the edit precedes the diff hunk, then adjust the hunk
2031 // to reflect the edit.
2032 else if edit_since.old.end < range.start {
2033 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2034 edits_since.next();
2035 }
2036 // If the edit intersects a diff hunk, then discard that hunk.
2037 else {
2038 return None;
2039 }
2040 }
2041
2042 let start = (range.start as i64 + delta) as usize;
2043 let end = (range.end as i64 + delta) as usize;
2044 Some((start..end, new_text))
2045 });
2046
2047 self.start_transaction();
2048 self.text.set_line_ending(diff.line_ending);
2049 self.edit(adjusted_edits, None, cx);
2050 self.end_transaction(cx)
2051 }
2052
2053 pub fn has_unsaved_edits(&self) -> bool {
2054 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2055
2056 if last_version == self.version {
2057 self.has_unsaved_edits
2058 .set((last_version, has_unsaved_edits));
2059 return has_unsaved_edits;
2060 }
2061
2062 let has_edits = self.has_edits_since(&self.saved_version);
2063 self.has_unsaved_edits
2064 .set((self.version.clone(), has_edits));
2065 has_edits
2066 }
2067
2068 /// Checks if the buffer has unsaved changes.
2069 pub fn is_dirty(&self) -> bool {
2070 if self.capability == Capability::ReadOnly {
2071 return false;
2072 }
2073 if self.has_conflict {
2074 return true;
2075 }
2076 match self.file.as_ref().map(|f| f.disk_state()) {
2077 Some(DiskState::New) | Some(DiskState::Deleted) => {
2078 !self.is_empty() && self.has_unsaved_edits()
2079 }
2080 _ => self.has_unsaved_edits(),
2081 }
2082 }
2083
2084 /// Checks if the buffer and its file have both changed since the buffer
2085 /// was last saved or reloaded.
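///
/// A hedged sketch of a save guard using this together with [`Buffer::is_dirty`]:
///
/// ```ignore
/// if buffer.has_conflict() {
///     // The file changed on disk since the last save/load and the buffer
///     // also has unsaved edits; prompt before overwriting.
/// } else if buffer.is_dirty() {
///     // Safe to save without prompting.
/// }
/// ```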
2086 pub fn has_conflict(&self) -> bool {
2087 if self.has_conflict {
2088 return true;
2089 }
2090 let Some(file) = self.file.as_ref() else {
2091 return false;
2092 };
2093 match file.disk_state() {
2094 DiskState::New => false,
2095 DiskState::Present { mtime } => match self.saved_mtime {
2096 Some(saved_mtime) => {
2097 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2098 }
2099 None => true,
2100 },
2101 DiskState::Deleted => false,
2102 }
2103 }
2104
2105 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2106 pub fn subscribe(&mut self) -> Subscription {
2107 self.text.subscribe()
2108 }
2109
2110 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2111 ///
2112 /// This allows downstream code to check if the buffer's text has changed without
2113 /// waiting for an effect cycle, which would be required if using events.
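///
/// A minimal sketch, assuming the caller keeps the strong `Rc` alive for as
/// long as it wants change notifications:
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// // ... later, after other code may have edited the buffer ...
/// if changed.take() {
///     // The buffer's text changed since the bit was registered (or last reset).
/// }
/// ```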
2114 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2115 if let Err(ix) = self
2116 .change_bits
2117 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2118 {
2119 self.change_bits.insert(ix, bit);
2120 }
2121 }
2122
2123 /// Set the change bit for all "listeners".
2124 fn was_changed(&mut self) {
2125 self.change_bits.retain(|change_bit| {
2126 change_bit
2127 .upgrade()
2128 .inspect(|bit| {
2129 _ = bit.replace(true);
2130 })
2131 .is_some()
2132 });
2133 }
2134
2135 /// Starts a transaction, if one is not already in-progress. When undoing or
2136 /// redoing edits, all of the edits performed within a transaction are undone
2137 /// or redone together.
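///
/// A hedged sketch of grouping two edits into a single undo step:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// let len = buffer.len();
/// buffer.edit([(len..len, "}\n")], None, cx);
/// buffer.end_transaction(cx);
/// // A single `undo` now reverts both edits together.
/// ```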
2138 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2139 self.start_transaction_at(Instant::now())
2140 }
2141
2142 /// Starts a transaction, providing the current time. Subsequent transactions
2143 /// that occur within a short period of time will be grouped together. This
2144 /// is controlled by the buffer's undo grouping duration.
2145 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2146 self.transaction_depth += 1;
2147 if self.was_dirty_before_starting_transaction.is_none() {
2148 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2149 }
2150 self.text.start_transaction_at(now)
2151 }
2152
2153 /// Terminates the current transaction, if this is the outermost transaction.
2154 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2155 self.end_transaction_at(Instant::now(), cx)
2156 }
2157
2158 /// Terminates the current transaction, providing the current time. Subsequent transactions
2159 /// that occur within a short period of time will be grouped together. This
2160 /// is controlled by the buffer's undo grouping duration.
2161 pub fn end_transaction_at(
2162 &mut self,
2163 now: Instant,
2164 cx: &mut Context<Self>,
2165 ) -> Option<TransactionId> {
2166 assert!(self.transaction_depth > 0);
2167 self.transaction_depth -= 1;
2168 let was_dirty = if self.transaction_depth == 0 {
2169 self.was_dirty_before_starting_transaction.take().unwrap()
2170 } else {
2171 false
2172 };
2173 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2174 self.did_edit(&start_version, was_dirty, cx);
2175 Some(transaction_id)
2176 } else {
2177 None
2178 }
2179 }
2180
2181 /// Manually add a transaction to the buffer's undo history.
2182 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2183 self.text.push_transaction(transaction, now);
2184 }
2185
2186 /// Differs from `push_transaction` in that it does not clear the redo
2187 /// stack. Intended to be used to create a parent transaction to merge
2188 /// potential child transactions into.
2189 ///
2190 /// The caller is responsible for removing it from the undo history using
2191 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2192 /// are merged into this transaction, the caller is responsible for ensuring
2193 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2194 /// cleared is to create transactions with the usual `start_transaction` and
2195 /// `end_transaction` methods and merging the resulting transactions into
2196 /// the transaction created by this method.
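///
/// A hedged sketch of the pattern described above:
///
/// ```ignore
/// let parent = buffer.push_empty_transaction(Instant::now());
/// buffer.start_transaction();
/// buffer.edit([(0..0, "child edit")], None, cx);
/// match buffer.end_transaction(cx) {
///     Some(child) => buffer.merge_transactions(child, parent),
///     // Nothing was merged, so remove the empty parent from the history.
///     None => {
///         buffer.forget_transaction(parent);
///     }
/// }
/// ```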
2197 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2198 self.text.push_empty_transaction(now)
2199 }
2200
2201 /// Prevent the last transaction from being grouped with any subsequent transactions,
2202 /// even if they occur within the buffer's undo grouping duration.
2203 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2204 self.text.finalize_last_transaction()
2205 }
2206
2207 /// Manually group all changes since a given transaction.
2208 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2209 self.text.group_until_transaction(transaction_id);
2210 }
2211
2212 /// Manually remove a transaction from the buffer's undo history.
2213 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2214 self.text.forget_transaction(transaction_id)
2215 }
2216
2217 /// Retrieve a transaction from the buffer's undo history.
2218 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2219 self.text.get_transaction(transaction_id)
2220 }
2221
2222 /// Manually merge two transactions in the buffer's undo history.
2223 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2224 self.text.merge_transactions(transaction, destination);
2225 }
2226
2227 /// Waits for the buffer to receive operations with the given timestamps.
2228 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2229 &mut self,
2230 edit_ids: It,
2231 ) -> impl Future<Output = Result<()>> + use<It> {
2232 self.text.wait_for_edits(edit_ids)
2233 }
2234
2235 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2236 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2237 &mut self,
2238 anchors: It,
2239 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2240 self.text.wait_for_anchors(anchors)
2241 }
2242
2243 /// Waits for the buffer to receive operations up to the given version.
2244 pub fn wait_for_version(
2245 &mut self,
2246 version: clock::Global,
2247 ) -> impl Future<Output = Result<()>> + use<> {
2248 self.text.wait_for_version(version)
2249 }
2250
2251 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2252 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2253 pub fn give_up_waiting(&mut self) {
2254 self.text.give_up_waiting();
2255 }
2256
2257 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2258 let mut rx = None;
2259 if !self.autoindent_requests.is_empty() {
2260 let channel = oneshot::channel();
2261 self.wait_for_autoindent_txs.push(channel.0);
2262 rx = Some(channel.1);
2263 }
2264 rx
2265 }
2266
2267 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2268 pub fn set_active_selections(
2269 &mut self,
2270 selections: Arc<[Selection<Anchor>]>,
2271 line_mode: bool,
2272 cursor_shape: CursorShape,
2273 cx: &mut Context<Self>,
2274 ) {
2275 let lamport_timestamp = self.text.lamport_clock.tick();
2276 self.remote_selections.insert(
2277 self.text.replica_id(),
2278 SelectionSet {
2279 selections: selections.clone(),
2280 lamport_timestamp,
2281 line_mode,
2282 cursor_shape,
2283 },
2284 );
2285 self.send_operation(
2286 Operation::UpdateSelections {
2287 selections,
2288 line_mode,
2289 lamport_timestamp,
2290 cursor_shape,
2291 },
2292 true,
2293 cx,
2294 );
2295 self.non_text_state_update_count += 1;
2296 cx.notify();
2297 }
2298
2299 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2300 /// this replica.
2301 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2302 if self
2303 .remote_selections
2304 .get(&self.text.replica_id())
2305 .is_none_or(|set| !set.selections.is_empty())
2306 {
2307 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2308 }
2309 }
2310
2311 pub fn set_agent_selections(
2312 &mut self,
2313 selections: Arc<[Selection<Anchor>]>,
2314 line_mode: bool,
2315 cursor_shape: CursorShape,
2316 cx: &mut Context<Self>,
2317 ) {
2318 let lamport_timestamp = self.text.lamport_clock.tick();
2319 self.remote_selections.insert(
2320 ReplicaId::AGENT,
2321 SelectionSet {
2322 selections,
2323 lamport_timestamp,
2324 line_mode,
2325 cursor_shape,
2326 },
2327 );
2328 self.non_text_state_update_count += 1;
2329 cx.notify();
2330 }
2331
2332 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2333 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2334 }
2335
2336 /// Replaces the buffer's entire text.
2337 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2338 where
2339 T: Into<Arc<str>>,
2340 {
2341 self.autoindent_requests.clear();
2342 self.edit([(0..self.len(), text)], None, cx)
2343 }
2344
2345 /// Appends the given text to the end of the buffer.
2346 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2347 where
2348 T: Into<Arc<str>>,
2349 {
2350 self.edit([(self.len()..self.len(), text)], None, cx)
2351 }
2352
2353 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2354 /// delete, and a string of text to insert at that location.
2355 ///
2356 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2357 /// request for the edited ranges, which will be processed when the buffer finishes
2358 /// parsing.
2359 ///
2360 /// Parsing takes place at the end of a transaction, and may compute synchronously
2361 /// or asynchronously, depending on the changes.
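///
/// A hedged usage sketch (offsets and text are illustrative):
///
/// ```ignore
/// // Replace the first three bytes and insert a new line at the end,
/// // auto-indenting each affected line.
/// let len = buffer.len();
/// buffer.edit(
///     [(0..3, "let"), (len..len, "\nvalue")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```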
2362 pub fn edit<I, S, T>(
2363 &mut self,
2364 edits_iter: I,
2365 autoindent_mode: Option<AutoindentMode>,
2366 cx: &mut Context<Self>,
2367 ) -> Option<clock::Lamport>
2368 where
2369 I: IntoIterator<Item = (Range<S>, T)>,
2370 S: ToOffset,
2371 T: Into<Arc<str>>,
2372 {
2373 // Skip invalid edits and coalesce contiguous ones.
2374 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2375
2376 for (range, new_text) in edits_iter {
2377 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2378
2379 if range.start > range.end {
2380 mem::swap(&mut range.start, &mut range.end);
2381 }
2382 let new_text = new_text.into();
2383 if !new_text.is_empty() || !range.is_empty() {
2384 if let Some((prev_range, prev_text)) = edits.last_mut()
2385 && prev_range.end >= range.start
2386 {
2387 prev_range.end = cmp::max(prev_range.end, range.end);
2388 *prev_text = format!("{prev_text}{new_text}").into();
2389 } else {
2390 edits.push((range, new_text));
2391 }
2392 }
2393 }
2394 if edits.is_empty() {
2395 return None;
2396 }
2397
2398 self.start_transaction();
2399 self.pending_autoindent.take();
2400 let autoindent_request = autoindent_mode
2401 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2402
2403 let edit_operation = self
2404 .text
2405 .edit(edits.iter().cloned(), cx.background_executor());
2406 let edit_id = edit_operation.timestamp();
2407
2408 if let Some((before_edit, mode)) = autoindent_request {
2409 let mut delta = 0isize;
2410 let mut previous_setting = None;
2411 let entries: Vec<_> = edits
2412 .into_iter()
2413 .enumerate()
2414 .zip(&edit_operation.as_edit().unwrap().new_text)
2415 .filter(|((_, (range, _)), _)| {
2416 let language = before_edit.language_at(range.start);
2417 let language_id = language.map(|l| l.id());
2418 if let Some((cached_language_id, auto_indent)) = previous_setting
2419 && cached_language_id == language_id
2420 {
2421 auto_indent
2422 } else {
2423 // The auto-indent setting is not present in editorconfigs, hence
2424 // we can avoid passing the file here.
2425 let auto_indent =
2426 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2427 previous_setting = Some((language_id, auto_indent));
2428 auto_indent
2429 }
2430 })
2431 .map(|((ix, (range, _)), new_text)| {
2432 let new_text_length = new_text.len();
2433 let old_start = range.start.to_point(&before_edit);
2434 let new_start = (delta + range.start as isize) as usize;
2435 let range_len = range.end - range.start;
2436 delta += new_text_length as isize - range_len as isize;
2437
2438 // Decide what range of the insertion to auto-indent, and whether
2439 // the first line of the insertion should be considered a newly-inserted line
2440 // or an edit to an existing line.
2441 let mut range_of_insertion_to_indent = 0..new_text_length;
2442 let mut first_line_is_new = true;
2443
2444 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2445 let old_line_end = before_edit.line_len(old_start.row);
2446
2447 if old_start.column > old_line_start {
2448 first_line_is_new = false;
2449 }
2450
2451 if !new_text.contains('\n')
2452 && (old_start.column + (range_len as u32) < old_line_end
2453 || old_line_end == old_line_start)
2454 {
2455 first_line_is_new = false;
2456 }
2457
2458 // When inserting text starting with a newline, avoid auto-indenting the
2459 // previous line.
2460 if new_text.starts_with('\n') {
2461 range_of_insertion_to_indent.start += 1;
2462 first_line_is_new = true;
2463 }
2464
2465 let mut original_indent_column = None;
2466 if let AutoindentMode::Block {
2467 original_indent_columns,
2468 } = &mode
2469 {
2470 original_indent_column = Some(if new_text.starts_with('\n') {
2471 indent_size_for_text(
2472 new_text[range_of_insertion_to_indent.clone()].chars(),
2473 )
2474 .len
2475 } else {
2476 original_indent_columns
2477 .get(ix)
2478 .copied()
2479 .flatten()
2480 .unwrap_or_else(|| {
2481 indent_size_for_text(
2482 new_text[range_of_insertion_to_indent.clone()].chars(),
2483 )
2484 .len
2485 })
2486 });
2487
2488 // Avoid auto-indenting the line after the edit.
2489 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2490 range_of_insertion_to_indent.end -= 1;
2491 }
2492 }
2493
2494 AutoindentRequestEntry {
2495 first_line_is_new,
2496 original_indent_column,
2497 indent_size: before_edit.language_indent_size_at(range.start, cx),
2498 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2499 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2500 }
2501 })
2502 .collect();
2503
2504 if !entries.is_empty() {
2505 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2506 before_edit,
2507 entries,
2508 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2509 ignore_empty_lines: false,
2510 }));
2511 }
2512 }
2513
2514 self.end_transaction(cx);
2515 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2516 Some(edit_id)
2517 }
2518
2519 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2520 self.was_changed();
2521
2522 if self.edits_since::<usize>(old_version).next().is_none() {
2523 return;
2524 }
2525
2526 self.reparse(cx);
2527 cx.emit(BufferEvent::Edited);
2528 if was_dirty != self.is_dirty() {
2529 cx.emit(BufferEvent::DirtyChanged);
2530 }
2531 cx.notify();
2532 }
2533
2534 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2535 where
2536 I: IntoIterator<Item = Range<T>>,
2537 T: ToOffset + Copy,
2538 {
2539 let before_edit = self.snapshot();
2540 let entries = ranges
2541 .into_iter()
2542 .map(|range| AutoindentRequestEntry {
2543 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2544 first_line_is_new: true,
2545 indent_size: before_edit.language_indent_size_at(range.start, cx),
2546 original_indent_column: None,
2547 })
2548 .collect();
2549 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2550 before_edit,
2551 entries,
2552 is_block_mode: false,
2553 ignore_empty_lines: true,
2554 }));
2555 self.request_autoindent(cx);
2556 }
2557
2558 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2559 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2560 pub fn insert_empty_line(
2561 &mut self,
2562 position: impl ToPoint,
2563 space_above: bool,
2564 space_below: bool,
2565 cx: &mut Context<Self>,
2566 ) -> Point {
2567 let mut position = position.to_point(self);
2568
2569 self.start_transaction();
2570
2571 self.edit(
2572 [(position..position, "\n")],
2573 Some(AutoindentMode::EachLine),
2574 cx,
2575 );
2576
2577 if position.column > 0 {
2578 position += Point::new(1, 0);
2579 }
2580
2581 if !self.is_line_blank(position.row) {
2582 self.edit(
2583 [(position..position, "\n")],
2584 Some(AutoindentMode::EachLine),
2585 cx,
2586 );
2587 }
2588
2589 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2590 self.edit(
2591 [(position..position, "\n")],
2592 Some(AutoindentMode::EachLine),
2593 cx,
2594 );
2595 position.row += 1;
2596 }
2597
2598 if space_below
2599 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2600 {
2601 self.edit(
2602 [(position..position, "\n")],
2603 Some(AutoindentMode::EachLine),
2604 cx,
2605 );
2606 }
2607
2608 self.end_transaction(cx);
2609
2610 position
2611 }
2612
2613 /// Applies the given remote operations to the buffer.
2614 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2615 self.pending_autoindent.take();
2616 let was_dirty = self.is_dirty();
2617 let old_version = self.version.clone();
2618 let mut deferred_ops = Vec::new();
2619 let buffer_ops = ops
2620 .into_iter()
2621 .filter_map(|op| match op {
2622 Operation::Buffer(op) => Some(op),
2623 _ => {
2624 if self.can_apply_op(&op) {
2625 self.apply_op(op, cx);
2626 } else {
2627 deferred_ops.push(op);
2628 }
2629 None
2630 }
2631 })
2632 .collect::<Vec<_>>();
2633 for operation in buffer_ops.iter() {
2634 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2635 }
2636 self.text
2637 .apply_ops(buffer_ops, Some(cx.background_executor()));
2638 self.deferred_ops.insert(deferred_ops);
2639 self.flush_deferred_ops(cx);
2640 self.did_edit(&old_version, was_dirty, cx);
2641 // Notify independently of whether the buffer was edited as the operations could include a
2642 // selection update.
2643 cx.notify();
2644 }
2645
2646 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2647 let mut deferred_ops = Vec::new();
2648 for op in self.deferred_ops.drain().iter().cloned() {
2649 if self.can_apply_op(&op) {
2650 self.apply_op(op, cx);
2651 } else {
2652 deferred_ops.push(op);
2653 }
2654 }
2655 self.deferred_ops.insert(deferred_ops);
2656 }
2657
2658 pub fn has_deferred_ops(&self) -> bool {
2659 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2660 }
2661
2662 fn can_apply_op(&self, operation: &Operation) -> bool {
2663 match operation {
2664 Operation::Buffer(_) => {
2665 unreachable!("buffer operations should never be applied at this layer")
2666 }
2667 Operation::UpdateDiagnostics {
2668 diagnostics: diagnostic_set,
2669 ..
2670 } => diagnostic_set.iter().all(|diagnostic| {
2671 self.text.can_resolve(&diagnostic.range.start)
2672 && self.text.can_resolve(&diagnostic.range.end)
2673 }),
2674 Operation::UpdateSelections { selections, .. } => selections
2675 .iter()
2676 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2677 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2678 }
2679 }
2680
2681 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2682 match operation {
2683 Operation::Buffer(_) => {
2684 unreachable!("buffer operations should never be applied at this layer")
2685 }
2686 Operation::UpdateDiagnostics {
2687 server_id,
2688 diagnostics: diagnostic_set,
2689 lamport_timestamp,
2690 } => {
2691 let snapshot = self.snapshot();
2692 self.apply_diagnostic_update(
2693 server_id,
2694 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2695 lamport_timestamp,
2696 cx,
2697 );
2698 }
2699 Operation::UpdateSelections {
2700 selections,
2701 lamport_timestamp,
2702 line_mode,
2703 cursor_shape,
2704 } => {
2705 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2706 && set.lamport_timestamp > lamport_timestamp
2707 {
2708 return;
2709 }
2710
2711 self.remote_selections.insert(
2712 lamport_timestamp.replica_id,
2713 SelectionSet {
2714 selections,
2715 lamport_timestamp,
2716 line_mode,
2717 cursor_shape,
2718 },
2719 );
2720 self.text.lamport_clock.observe(lamport_timestamp);
2721 self.non_text_state_update_count += 1;
2722 }
2723 Operation::UpdateCompletionTriggers {
2724 triggers,
2725 lamport_timestamp,
2726 server_id,
2727 } => {
2728 if triggers.is_empty() {
2729 self.completion_triggers_per_language_server
2730 .remove(&server_id);
2731 self.completion_triggers = self
2732 .completion_triggers_per_language_server
2733 .values()
2734 .flat_map(|triggers| triggers.iter().cloned())
2735 .collect();
2736 } else {
2737 self.completion_triggers_per_language_server
2738 .insert(server_id, triggers.iter().cloned().collect());
2739 self.completion_triggers.extend(triggers);
2740 }
2741 self.text.lamport_clock.observe(lamport_timestamp);
2742 }
2743 Operation::UpdateLineEnding {
2744 line_ending,
2745 lamport_timestamp,
2746 } => {
2747 self.text.set_line_ending(line_ending);
2748 self.text.lamport_clock.observe(lamport_timestamp);
2749 }
2750 }
2751 }
2752
2753 fn apply_diagnostic_update(
2754 &mut self,
2755 server_id: LanguageServerId,
2756 diagnostics: DiagnosticSet,
2757 lamport_timestamp: clock::Lamport,
2758 cx: &mut Context<Self>,
2759 ) {
2760 if lamport_timestamp > self.diagnostics_timestamp {
2761 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2762 if diagnostics.is_empty() {
2763 if let Ok(ix) = ix {
2764 self.diagnostics.remove(ix);
2765 }
2766 } else {
2767 match ix {
2768 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2769 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2770 };
2771 }
2772 self.diagnostics_timestamp = lamport_timestamp;
2773 self.non_text_state_update_count += 1;
2774 self.text.lamport_clock.observe(lamport_timestamp);
2775 cx.notify();
2776 cx.emit(BufferEvent::DiagnosticsUpdated);
2777 }
2778 }
2779
2780 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2781 self.was_changed();
2782 cx.emit(BufferEvent::Operation {
2783 operation,
2784 is_local,
2785 });
2786 }
2787
2788 /// Removes the selections for a given peer.
2789 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2790 self.remote_selections.remove(&replica_id);
2791 cx.notify();
2792 }
2793
2794 /// Undoes the most recent transaction.
2795 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2796 let was_dirty = self.is_dirty();
2797 let old_version = self.version.clone();
2798
2799 if let Some((transaction_id, operation)) = self.text.undo() {
2800 self.send_operation(Operation::Buffer(operation), true, cx);
2801 self.did_edit(&old_version, was_dirty, cx);
2802 Some(transaction_id)
2803 } else {
2804 None
2805 }
2806 }
2807
2808 /// Manually undoes a specific transaction in the buffer's undo history.
2809 pub fn undo_transaction(
2810 &mut self,
2811 transaction_id: TransactionId,
2812 cx: &mut Context<Self>,
2813 ) -> bool {
2814 let was_dirty = self.is_dirty();
2815 let old_version = self.version.clone();
2816 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2817 self.send_operation(Operation::Buffer(operation), true, cx);
2818 self.did_edit(&old_version, was_dirty, cx);
2819 true
2820 } else {
2821 false
2822 }
2823 }
2824
2825 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2826 pub fn undo_to_transaction(
2827 &mut self,
2828 transaction_id: TransactionId,
2829 cx: &mut Context<Self>,
2830 ) -> bool {
2831 let was_dirty = self.is_dirty();
2832 let old_version = self.version.clone();
2833
2834 let operations = self.text.undo_to_transaction(transaction_id);
2835 let undone = !operations.is_empty();
2836 for operation in operations {
2837 self.send_operation(Operation::Buffer(operation), true, cx);
2838 }
2839 if undone {
2840 self.did_edit(&old_version, was_dirty, cx)
2841 }
2842 undone
2843 }
2844
2845 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2846 let was_dirty = self.is_dirty();
2847 let operation = self.text.undo_operations(counts);
2848 let old_version = self.version.clone();
2849 self.send_operation(Operation::Buffer(operation), true, cx);
2850 self.did_edit(&old_version, was_dirty, cx);
2851 }
2852
2853 /// Redoes the most recently undone transaction.
2854 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2855 let was_dirty = self.is_dirty();
2856 let old_version = self.version.clone();
2857
2858 if let Some((transaction_id, operation)) = self.text.redo() {
2859 self.send_operation(Operation::Buffer(operation), true, cx);
2860 self.did_edit(&old_version, was_dirty, cx);
2861 Some(transaction_id)
2862 } else {
2863 None
2864 }
2865 }
2866
2867 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2868 pub fn redo_to_transaction(
2869 &mut self,
2870 transaction_id: TransactionId,
2871 cx: &mut Context<Self>,
2872 ) -> bool {
2873 let was_dirty = self.is_dirty();
2874 let old_version = self.version.clone();
2875
2876 let operations = self.text.redo_to_transaction(transaction_id);
2877 let redone = !operations.is_empty();
2878 for operation in operations {
2879 self.send_operation(Operation::Buffer(operation), true, cx);
2880 }
2881 if redone {
2882 self.did_edit(&old_version, was_dirty, cx)
2883 }
2884 redone
2885 }
2886
2887 /// Override current completion triggers with the user-provided completion triggers.
2888 pub fn set_completion_triggers(
2889 &mut self,
2890 server_id: LanguageServerId,
2891 triggers: BTreeSet<String>,
2892 cx: &mut Context<Self>,
2893 ) {
2894 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2895 if triggers.is_empty() {
2896 self.completion_triggers_per_language_server
2897 .remove(&server_id);
2898 self.completion_triggers = self
2899 .completion_triggers_per_language_server
2900 .values()
2901 .flat_map(|triggers| triggers.iter().cloned())
2902 .collect();
2903 } else {
2904 self.completion_triggers_per_language_server
2905 .insert(server_id, triggers.clone());
2906 self.completion_triggers.extend(triggers.iter().cloned());
2907 }
2908 self.send_operation(
2909 Operation::UpdateCompletionTriggers {
2910 triggers: triggers.into_iter().collect(),
2911 lamport_timestamp: self.completion_triggers_timestamp,
2912 server_id,
2913 },
2914 true,
2915 cx,
2916 );
2917 cx.notify();
2918 }
2919
2920 /// Returns a list of strings which trigger a completion menu for this language.
2921 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2922 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2923 &self.completion_triggers
2924 }
2925
2926 /// Call this directly after performing edits to prevent the preview tab
2927 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2928 /// to return false until there are additional edits.
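///
/// A hedged sketch, mirroring how autoindent edits preserve preview status:
///
/// ```ignore
/// let preserve = buffer.preserve_preview();
/// buffer.edit([(0..0, "// generated\n")], None, cx);
/// if preserve {
///     buffer.refresh_preview();
/// }
/// ```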
2929 pub fn refresh_preview(&mut self) {
2930 self.preview_version = self.version.clone();
2931 }
2932
2933 /// Whether we should preserve the preview status of a tab containing this buffer.
2934 pub fn preserve_preview(&self) -> bool {
2935 !self.has_edits_since(&self.preview_version)
2936 }
2937
2938 /// Updates the buffer's encoding.
2939 pub fn update_encoding(&mut self, encoding: Encoding) {
2940 self.encoding.set(encoding.get());
2941 }
2942}
2943
2944#[doc(hidden)]
2945#[cfg(any(test, feature = "test-support"))]
2946impl Buffer {
2947 pub fn edit_via_marked_text(
2948 &mut self,
2949 marked_string: &str,
2950 autoindent_mode: Option<AutoindentMode>,
2951 cx: &mut Context<Self>,
2952 ) {
2953 let edits = self.edits_for_marked_text(marked_string);
2954 self.edit(edits, autoindent_mode, cx);
2955 }
2956
2957 pub fn set_group_interval(&mut self, group_interval: Duration) {
2958 self.text.set_group_interval(group_interval);
2959 }
2960
2961 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2962 where
2963 T: rand::Rng,
2964 {
2965 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2966 let mut last_end = None;
2967 for _ in 0..old_range_count {
2968 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2969 break;
2970 }
2971
2972 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2973 let mut range = self.random_byte_range(new_start, rng);
2974 if rng.random_bool(0.2) {
2975 mem::swap(&mut range.start, &mut range.end);
2976 }
2977 last_end = Some(range.end);
2978
2979 let new_text_len = rng.random_range(0..10);
2980 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2981 new_text = new_text.to_uppercase();
2982
2983 edits.push((range, new_text));
2984 }
2985 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2986 self.edit(edits, None, cx);
2987 }
2988
2989 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2990 let was_dirty = self.is_dirty();
2991 let old_version = self.version.clone();
2992
2993 let ops = self.text.randomly_undo_redo(rng);
2994 if !ops.is_empty() {
2995 for op in ops {
2996 self.send_operation(Operation::Buffer(op), true, cx);
2997 self.did_edit(&old_version, was_dirty, cx);
2998 }
2999 }
3000 }
3001}
3002
3003impl EventEmitter<BufferEvent> for Buffer {}
3004
3005impl Deref for Buffer {
3006 type Target = TextBuffer;
3007
3008 fn deref(&self) -> &Self::Target {
3009 &self.text
3010 }
3011}
3012
3013impl BufferSnapshot {
3014 /// Returns [`IndentSize`] for a given line that respects user settings and
3015 /// language preferences.
3016 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3017 indent_size_for_line(self, row)
3018 }
3019
3020 /// Returns [`IndentSize`] for a given position that respects user settings
3021 /// and language preferences.
3022 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3023 let settings = language_settings(
3024 self.language_at(position).map(|l| l.name()),
3025 self.file(),
3026 cx,
3027 );
3028 if settings.hard_tabs {
3029 IndentSize::tab()
3030 } else {
3031 IndentSize::spaces(settings.tab_size.get())
3032 }
3033 }
3034
3035 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3036 /// is passed in as `single_indent_size`.
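///
/// A hedged sketch, using four spaces as the indentation unit:
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     // `indent.len` is the suggested indent column for `row`.
/// }
/// ```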
3037 pub fn suggested_indents(
3038 &self,
3039 rows: impl Iterator<Item = u32>,
3040 single_indent_size: IndentSize,
3041 ) -> BTreeMap<u32, IndentSize> {
3042 let mut result = BTreeMap::new();
3043
3044 for row_range in contiguous_ranges(rows, 10) {
3045 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3046 Some(suggestions) => suggestions,
3047 _ => break,
3048 };
3049
3050 for (row, suggestion) in row_range.zip(suggestions) {
3051 let indent_size = if let Some(suggestion) = suggestion {
3052 result
3053 .get(&suggestion.basis_row)
3054 .copied()
3055 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3056 .with_delta(suggestion.delta, single_indent_size)
3057 } else {
3058 self.indent_size_for_line(row)
3059 };
3060
3061 result.insert(row, indent_size);
3062 }
3063 }
3064
3065 result
3066 }
3067
3068 fn suggest_autoindents(
3069 &self,
3070 row_range: Range<u32>,
3071 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3072 let config = &self.language.as_ref()?.config;
3073 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3074
3075 #[derive(Debug, Clone)]
3076 struct StartPosition {
3077 start: Point,
3078 suffix: SharedString,
3079 }
3080
3081 // Find the suggested indentation ranges based on the syntax tree.
3082 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3083 let end = Point::new(row_range.end, 0);
3084 let range = (start..end).to_offset(&self.text);
3085 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3086 Some(&grammar.indents_config.as_ref()?.query)
3087 });
3088 let indent_configs = matches
3089 .grammars()
3090 .iter()
3091 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3092 .collect::<Vec<_>>();
3093
3094 let mut indent_ranges = Vec::<Range<Point>>::new();
3095 let mut start_positions = Vec::<StartPosition>::new();
3096 let mut outdent_positions = Vec::<Point>::new();
3097 while let Some(mat) = matches.peek() {
3098 let mut start: Option<Point> = None;
3099 let mut end: Option<Point> = None;
3100
3101 let config = indent_configs[mat.grammar_index];
3102 for capture in mat.captures {
3103 if capture.index == config.indent_capture_ix {
3104 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3105 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3106 } else if Some(capture.index) == config.start_capture_ix {
3107 start = Some(Point::from_ts_point(capture.node.end_position()));
3108 } else if Some(capture.index) == config.end_capture_ix {
3109 end = Some(Point::from_ts_point(capture.node.start_position()));
3110 } else if Some(capture.index) == config.outdent_capture_ix {
3111 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3112 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3113 start_positions.push(StartPosition {
3114 start: Point::from_ts_point(capture.node.start_position()),
3115 suffix: suffix.clone(),
3116 });
3117 }
3118 }
3119
3120 matches.advance();
3121 if let Some((start, end)) = start.zip(end) {
3122 if start.row == end.row {
3123 continue;
3124 }
3125 let range = start..end;
3126 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3127 Err(ix) => indent_ranges.insert(ix, range),
3128 Ok(ix) => {
3129 let prev_range = &mut indent_ranges[ix];
3130 prev_range.end = prev_range.end.max(range.end);
3131 }
3132 }
3133 }
3134 }
3135
3136 let mut error_ranges = Vec::<Range<Point>>::new();
3137 let mut matches = self
3138 .syntax
3139 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3140 while let Some(mat) = matches.peek() {
3141 let node = mat.captures[0].node;
3142 let start = Point::from_ts_point(node.start_position());
3143 let end = Point::from_ts_point(node.end_position());
3144 let range = start..end;
3145 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3146 Ok(ix) | Err(ix) => ix,
3147 };
3148 let mut end_ix = ix;
3149 while let Some(existing_range) = error_ranges.get(end_ix) {
3150 if existing_range.end < end {
3151 end_ix += 1;
3152 } else {
3153 break;
3154 }
3155 }
3156 error_ranges.splice(ix..end_ix, [range]);
3157 matches.advance();
3158 }
3159
3160 outdent_positions.sort();
3161 for outdent_position in outdent_positions {
3162 // find the innermost indent range containing this outdent_position
3163 // set its end to the outdent position
3164 if let Some(range_to_truncate) = indent_ranges
3165 .iter_mut()
3166 .filter(|indent_range| indent_range.contains(&outdent_position))
3167 .next_back()
3168 {
3169 range_to_truncate.end = outdent_position;
3170 }
3171 }
3172
3173 start_positions.sort_by_key(|b| b.start);
3174
3175 // Find the suggested indentation increases and decreases based on regexes.
3176 let mut regex_outdent_map = HashMap::default();
3177 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3178 let mut start_positions_iter = start_positions.iter().peekable();
3179
3180 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3181 self.for_each_line(
3182 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3183 ..Point::new(row_range.end, 0),
3184 |row, line| {
3185 if config
3186 .decrease_indent_pattern
3187 .as_ref()
3188 .is_some_and(|regex| regex.is_match(line))
3189 {
3190 indent_change_rows.push((row, Ordering::Less));
3191 }
3192 if config
3193 .increase_indent_pattern
3194 .as_ref()
3195 .is_some_and(|regex| regex.is_match(line))
3196 {
3197 indent_change_rows.push((row + 1, Ordering::Greater));
3198 }
3199 while let Some(pos) = start_positions_iter.peek() {
3200 if pos.start.row < row {
3201 let pos = start_positions_iter.next().unwrap();
3202 last_seen_suffix
3203 .entry(pos.suffix.to_string())
3204 .or_default()
3205 .push(pos.start);
3206 } else {
3207 break;
3208 }
3209 }
3210 for rule in &config.decrease_indent_patterns {
3211 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3212 let row_start_column = self.indent_size_for_line(row).len;
3213 let basis_row = rule
3214 .valid_after
3215 .iter()
3216 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3217 .flatten()
3218 .filter(|start_point| start_point.column <= row_start_column)
3219 .max_by_key(|start_point| start_point.row);
3220 if let Some(outdent_to_row) = basis_row {
3221 regex_outdent_map.insert(row, outdent_to_row.row);
3222 }
3223 break;
3224 }
3225 }
3226 },
3227 );
3228
3229 let mut indent_changes = indent_change_rows.into_iter().peekable();
3230 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3231 prev_non_blank_row.unwrap_or(0)
3232 } else {
3233 row_range.start.saturating_sub(1)
3234 };
3235
3236 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3237 Some(row_range.map(move |row| {
3238 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3239
3240 let mut indent_from_prev_row = false;
3241 let mut outdent_from_prev_row = false;
3242 let mut outdent_to_row = u32::MAX;
3243 let mut from_regex = false;
3244
3245 while let Some((indent_row, delta)) = indent_changes.peek() {
3246 match indent_row.cmp(&row) {
3247 Ordering::Equal => match delta {
3248 Ordering::Less => {
3249 from_regex = true;
3250 outdent_from_prev_row = true
3251 }
3252 Ordering::Greater => {
3253 indent_from_prev_row = true;
3254 from_regex = true
3255 }
3256 _ => {}
3257 },
3258
3259 Ordering::Greater => break,
3260 Ordering::Less => {}
3261 }
3262
3263 indent_changes.next();
3264 }
3265
3266 for range in &indent_ranges {
3267 if range.start.row >= row {
3268 break;
3269 }
3270 if range.start.row == prev_row && range.end > row_start {
3271 indent_from_prev_row = true;
3272 }
3273 if range.end > prev_row_start && range.end <= row_start {
3274 outdent_to_row = outdent_to_row.min(range.start.row);
3275 }
3276 }
3277
3278 if let Some(basis_row) = regex_outdent_map.get(&row) {
3279 indent_from_prev_row = false;
3280 outdent_to_row = *basis_row;
3281 from_regex = true;
3282 }
3283
3284 let within_error = error_ranges
3285 .iter()
3286 .any(|e| e.start.row < row && e.end > row_start);
3287
3288 let suggestion = if outdent_to_row == prev_row
3289 || (outdent_from_prev_row && indent_from_prev_row)
3290 {
3291 Some(IndentSuggestion {
3292 basis_row: prev_row,
3293 delta: Ordering::Equal,
3294 within_error: within_error && !from_regex,
3295 })
3296 } else if indent_from_prev_row {
3297 Some(IndentSuggestion {
3298 basis_row: prev_row,
3299 delta: Ordering::Greater,
3300 within_error: within_error && !from_regex,
3301 })
3302 } else if outdent_to_row < prev_row {
3303 Some(IndentSuggestion {
3304 basis_row: outdent_to_row,
3305 delta: Ordering::Equal,
3306 within_error: within_error && !from_regex,
3307 })
3308 } else if outdent_from_prev_row {
3309 Some(IndentSuggestion {
3310 basis_row: prev_row,
3311 delta: Ordering::Less,
3312 within_error: within_error && !from_regex,
3313 })
3314 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3315 {
3316 Some(IndentSuggestion {
3317 basis_row: prev_row,
3318 delta: Ordering::Equal,
3319 within_error: within_error && !from_regex,
3320 })
3321 } else {
3322 None
3323 };
3324
3325 prev_row = row;
3326 prev_row_start = row_start;
3327 suggestion
3328 }))
3329 }
3330
3331 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3332 while row > 0 {
3333 row -= 1;
3334 if !self.is_line_blank(row) {
3335 return Some(row);
3336 }
3337 }
3338 None
3339 }
3340
3341 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3342 let captures = self.syntax.captures(range, &self.text, |grammar| {
3343 grammar
3344 .highlights_config
3345 .as_ref()
3346 .map(|config| &config.query)
3347 });
3348 let highlight_maps = captures
3349 .grammars()
3350 .iter()
3351 .map(|grammar| grammar.highlight_map())
3352 .collect();
3353 (captures, highlight_maps)
3354 }
3355
3356 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3357 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3358 /// returned in chunks where each chunk has a single syntax highlighting style and
3359 /// diagnostic status.
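///
/// A hedged sketch of reassembling the text of a range from its chunks (the
/// field names on the yielded chunk are assumed):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a single highlight style and diagnostic status.
///     text.push_str(chunk.text);
/// }
/// ```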
3360 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3361 let range = range.start.to_offset(self)..range.end.to_offset(self);
3362
3363 let mut syntax = None;
3364 if language_aware {
3365 syntax = Some(self.get_highlights(range.clone()));
3366 }
3367 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3368 let diagnostics = language_aware;
3369 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3370 }
3371
3372 pub fn highlighted_text_for_range<T: ToOffset>(
3373 &self,
3374 range: Range<T>,
3375 override_style: Option<HighlightStyle>,
3376 syntax_theme: &SyntaxTheme,
3377 ) -> HighlightedText {
3378 HighlightedText::from_buffer_range(
3379 range,
3380 &self.text,
3381 &self.syntax,
3382 override_style,
3383 syntax_theme,
3384 )
3385 }
3386
3387 /// Invokes the given callback for each line of text in the given range of the buffer.
3388 /// Uses a callback to avoid allocating a new string for each line.
3389 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3390 let mut line = String::new();
3391 let mut row = range.start.row;
3392 for chunk in self
3393 .as_rope()
3394 .chunks_in_range(range.to_offset(self))
3395 .chain(["\n"])
3396 {
3397 for (newline_ix, text) in chunk.split('\n').enumerate() {
3398 if newline_ix > 0 {
3399 callback(row, &line);
3400 row += 1;
3401 line.clear();
3402 }
3403 line.push_str(text);
3404 }
3405 }
3406 }
3407
3408 /// Iterates over every [`SyntaxLayer`] in the buffer.
3409 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3410 self.syntax_layers_for_range(0..self.len(), true)
3411 }
3412
3413 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3414 let offset = position.to_offset(self);
3415 self.syntax_layers_for_range(offset..offset, false)
3416 .filter(|l| l.node().end_byte() > offset)
3417 .last()
3418 }
3419
3420 pub fn syntax_layers_for_range<D: ToOffset>(
3421 &self,
3422 range: Range<D>,
3423 include_hidden: bool,
3424 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3425 self.syntax
3426 .layers_for_range(range, &self.text, include_hidden)
3427 }
3428
3429 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3430 &self,
3431 range: Range<D>,
3432 ) -> Option<SyntaxLayer<'_>> {
3433 let range = range.to_offset(self);
3434 self.syntax
3435 .layers_for_range(range, &self.text, false)
3436 .max_by(|a, b| {
3437 if a.depth != b.depth {
3438 a.depth.cmp(&b.depth)
3439 } else if a.offset.0 != b.offset.0 {
3440 a.offset.0.cmp(&b.offset.0)
3441 } else {
3442 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3443 }
3444 })
3445 }
3446
3447 /// Returns the main [`Language`].
3448 pub fn language(&self) -> Option<&Arc<Language>> {
3449 self.language.as_ref()
3450 }
3451
3452 /// Returns the [`Language`] at the given location.
3453 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3454 self.syntax_layer_at(position)
3455 .map(|info| info.language)
3456 .or(self.language.as_ref())
3457 }
3458
3459 /// Returns the settings for the language at the given location.
3460 pub fn settings_at<'a, D: ToOffset>(
3461 &'a self,
3462 position: D,
3463 cx: &'a App,
3464 ) -> Cow<'a, LanguageSettings> {
3465 language_settings(
3466 self.language_at(position).map(|l| l.name()),
3467 self.file.as_ref(),
3468 cx,
3469 )
3470 }
3471
3472 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3473 CharClassifier::new(self.language_scope_at(point))
3474 }
3475
3476 /// Returns the [`LanguageScope`] at the given location.
3477 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3478 let offset = position.to_offset(self);
3479 let mut scope = None;
3480 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3481
3482 // Use the layer that has the smallest node intersecting the given point.
3483 for layer in self
3484 .syntax
3485 .layers_for_range(offset..offset, &self.text, false)
3486 {
3487 let mut cursor = layer.node().walk();
3488
3489 let mut range = None;
3490 loop {
3491 let child_range = cursor.node().byte_range();
3492 if !child_range.contains(&offset) {
3493 break;
3494 }
3495
3496 range = Some(child_range);
3497 if cursor.goto_first_child_for_byte(offset).is_none() {
3498 break;
3499 }
3500 }
3501
3502 if let Some(range) = range
3503 && smallest_range_and_depth.as_ref().is_none_or(
3504 |(smallest_range, smallest_range_depth)| {
3505 if layer.depth > *smallest_range_depth {
3506 true
3507 } else if layer.depth == *smallest_range_depth {
3508 range.len() < smallest_range.len()
3509 } else {
3510 false
3511 }
3512 },
3513 )
3514 {
3515 smallest_range_and_depth = Some((range, layer.depth));
3516 scope = Some(LanguageScope {
3517 language: layer.language.clone(),
3518 override_id: layer.override_id(offset, &self.text),
3519 });
3520 }
3521 }
3522
3523 scope.or_else(|| {
3524 self.language.clone().map(|language| LanguageScope {
3525 language,
3526 override_id: None,
3527 })
3528 })
3529 }
3530
3531 /// Returns a tuple of the range and character kind of the word
3532 /// surrounding the given position.
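///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot` and a byte
/// `offset` in scope):
///
/// ```ignore
/// let (word_range, kind) = snapshot.surrounding_word(offset, None);
/// if kind == Some(CharKind::Word) {
///     let word: String = snapshot.text_for_range(word_range).collect();
/// }
/// ```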
3533 pub fn surrounding_word<T: ToOffset>(
3534 &self,
3535 start: T,
3536 scope_context: Option<CharScopeContext>,
3537 ) -> (Range<usize>, Option<CharKind>) {
3538 let mut start = start.to_offset(self);
3539 let mut end = start;
3540 let mut next_chars = self.chars_at(start).take(128).peekable();
3541 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3542
3543 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3544 let word_kind = cmp::max(
3545 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3546 next_chars.peek().copied().map(|c| classifier.kind(c)),
3547 );
3548
3549 for ch in prev_chars {
3550 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3551 start -= ch.len_utf8();
3552 } else {
3553 break;
3554 }
3555 }
3556
3557 for ch in next_chars {
3558 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3559 end += ch.len_utf8();
3560 } else {
3561 break;
3562 }
3563 }
3564
3565 (start..end, word_kind)
3566 }
3567
3568 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3569 /// range. When `require_larger` is true, the node found must be larger than the query range.
3570 ///
3571 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3572 /// be moved to the root of the tree.
3573 fn goto_node_enclosing_range(
3574 cursor: &mut tree_sitter::TreeCursor,
3575 query_range: &Range<usize>,
3576 require_larger: bool,
3577 ) -> bool {
3578 let mut ascending = false;
3579 loop {
3580 let mut range = cursor.node().byte_range();
3581 if query_range.is_empty() {
3582 // When the query range is empty and the current node starts after it, move to the
3583 // previous sibling to find the containing node.
3584 if range.start > query_range.start {
3585 cursor.goto_previous_sibling();
3586 range = cursor.node().byte_range();
3587 }
3588 } else {
3589 // When the query range is non-empty and the current node ends exactly at the start,
3590 // move to the next sibling to find a node that extends beyond the start.
3591 if range.end == query_range.start {
3592 cursor.goto_next_sibling();
3593 range = cursor.node().byte_range();
3594 }
3595 }
3596
3597 let encloses = range.contains_inclusive(query_range)
3598 && (!require_larger || range.len() > query_range.len());
3599 if !encloses {
3600 ascending = true;
3601 if !cursor.goto_parent() {
3602 return false;
3603 }
3604 continue;
3605 } else if ascending {
3606 return true;
3607 }
3608
3609 // Descend into the current node.
3610 if cursor
3611 .goto_first_child_for_byte(query_range.start)
3612 .is_none()
3613 {
3614 return true;
3615 }
3616 }
3617 }
3618
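/// Returns the smallest syntax node that encloses the given range and is strictly larger
/// than it, searching every syntax layer that overlaps the range.
///
/// A minimal "expand selection" sketch built on top of it (not compiled; assumes a
/// `snapshot: BufferSnapshot` and a `selection: Range<usize>`):
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
///     let expanded = node.byte_range();
///     debug_assert!(expanded.start <= selection.start && selection.end <= expanded.end);
/// }
/// ```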
3619 pub fn syntax_ancestor<'a, T: ToOffset>(
3620 &'a self,
3621 range: Range<T>,
3622 ) -> Option<tree_sitter::Node<'a>> {
3623 let range = range.start.to_offset(self)..range.end.to_offset(self);
3624 let mut result: Option<tree_sitter::Node<'a>> = None;
3625 for layer in self
3626 .syntax
3627 .layers_for_range(range.clone(), &self.text, true)
3628 {
3629 let mut cursor = layer.node().walk();
3630
3631 // Find the node that both contains the range and is larger than it.
3632 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3633 continue;
3634 }
3635
3636 let left_node = cursor.node();
3637 let mut layer_result = left_node;
3638
3639 // For an empty range, try to find another node immediately to the right of the range.
3640 if left_node.end_byte() == range.start {
3641 let mut right_node = None;
3642 while !cursor.goto_next_sibling() {
3643 if !cursor.goto_parent() {
3644 break;
3645 }
3646 }
3647
3648 while cursor.node().start_byte() == range.start {
3649 right_node = Some(cursor.node());
3650 if !cursor.goto_first_child() {
3651 break;
3652 }
3653 }
3654
3655 // If there is a candidate node on both sides of the (empty) range, then
3656 // decide between the two by favoring a named node over an anonymous token.
3657 // If both nodes are the same in that regard, favor the right one.
3658 if let Some(right_node) = right_node
3659 && (right_node.is_named() || !left_node.is_named())
3660 {
3661 layer_result = right_node;
3662 }
3663 }
3664
3665 if let Some(previous_result) = &result
3666 && previous_result.byte_range().len() < layer_result.byte_range().len()
3667 {
3668 continue;
3669 }
3670 result = Some(layer_result);
3671 }
3672
3673 result
3674 }
3675
3676 /// Find the previous sibling syntax node at the given range.
3677 ///
3678 /// This function locates the syntax node that precedes the node containing
3679 /// the given range. It searches hierarchically by:
3680 /// 1. Finding the node that contains the given range
3681 /// 2. Looking for the previous sibling at the same tree level
3682 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3683 ///
3684 /// Returns `None` if there is no previous sibling at any ancestor level.
3685 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3686 &'a self,
3687 range: Range<T>,
3688 ) -> Option<tree_sitter::Node<'a>> {
3689 let range = range.start.to_offset(self)..range.end.to_offset(self);
3690 let mut result: Option<tree_sitter::Node<'a>> = None;
3691
3692 for layer in self
3693 .syntax
3694 .layers_for_range(range.clone(), &self.text, true)
3695 {
3696 let mut cursor = layer.node().walk();
3697
3698 // Find the node that contains the range
3699 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3700 continue;
3701 }
3702
3703 // Look for the previous sibling, moving up ancestor levels if needed
3704 loop {
3705 if cursor.goto_previous_sibling() {
3706 let layer_result = cursor.node();
3707
3708 if let Some(previous_result) = &result {
3709 if previous_result.byte_range().end < layer_result.byte_range().end {
3710 continue;
3711 }
3712 }
3713 result = Some(layer_result);
3714 break;
3715 }
3716
3717 // No sibling found at this level, try moving up to parent
3718 if !cursor.goto_parent() {
3719 break;
3720 }
3721 }
3722 }
3723
3724 result
3725 }
3726
3727 /// Find the next sibling syntax node at the given range.
3728 ///
3729 /// This function locates the syntax node that follows the node containing
3730 /// the given range. It searches hierarchically by:
3731 /// 1. Finding the node that contains the given range
3732 /// 2. Looking for the next sibling at the same tree level
3733 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3734 ///
3735 /// Returns `None` if there is no next sibling at any ancestor level.
3736 pub fn syntax_next_sibling<'a, T: ToOffset>(
3737 &'a self,
3738 range: Range<T>,
3739 ) -> Option<tree_sitter::Node<'a>> {
3740 let range = range.start.to_offset(self)..range.end.to_offset(self);
3741 let mut result: Option<tree_sitter::Node<'a>> = None;
3742
3743 for layer in self
3744 .syntax
3745 .layers_for_range(range.clone(), &self.text, true)
3746 {
3747 let mut cursor = layer.node().walk();
3748
3749 // Find the node that contains the range
3750 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3751 continue;
3752 }
3753
3754 // Look for the next sibling, moving up ancestor levels if needed
3755 loop {
3756 if cursor.goto_next_sibling() {
3757 let layer_result = cursor.node();
3758
3759 if let Some(previous_result) = &result {
3760 if previous_result.byte_range().start > layer_result.byte_range().start {
3761 continue;
3762 }
3763 }
3764 result = Some(layer_result);
3765 break;
3766 }
3767
3768 // No sibling found at this level, try moving up to parent
3769 if !cursor.goto_parent() {
3770 break;
3771 }
3772 }
3773 }
3774
3775 result
3776 }
3777
3778 /// Returns the root syntax node within the row containing the given position.
3779 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3780 let start_offset = position.to_offset(self);
3781
3782 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3783
3784 let layer = self
3785 .syntax
3786 .layers_for_range(start_offset..start_offset, &self.text, true)
3787 .next()?;
3788
3789 let mut cursor = layer.node().walk();
3790
3791 // Descend to the first leaf that touches the start of the range.
3792 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3793 if cursor.node().end_byte() == start_offset {
3794 cursor.goto_next_sibling();
3795 }
3796 }
3797
3798 // Ascend to the root node within the same row.
3799 while cursor.goto_parent() {
3800 if cursor.node().start_position().row != row {
3801 break;
3802 }
3803 }
3804
3805 Some(cursor.node())
3806 }
3807
3808 /// Returns the outline for the buffer.
3809 ///
3810 /// This method allows passing an optional [`SyntaxTheme`] to
3811 /// syntax-highlight the returned symbols.
3812 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3813 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3814 }
3815
3816 /// Returns all the symbols that contain the given position.
3817 ///
3818 /// This method allows passing an optional [`SyntaxTheme`] to
3819 /// syntax-highlight the returned symbols.
3820 pub fn symbols_containing<T: ToOffset>(
3821 &self,
3822 position: T,
3823 theme: Option<&SyntaxTheme>,
3824 ) -> Vec<OutlineItem<Anchor>> {
3825 let position = position.to_offset(self);
3826 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3827 let end = self.clip_offset(position + 1, Bias::Right);
3828 let mut items = self.outline_items_containing(start..end, false, theme);
3829 let mut prev_depth = None;
3830 items.retain(|item| {
3831 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3832 prev_depth = Some(item.depth);
3833 result
3834 });
3835 items
3836 }
3837
3838 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3839 let range = range.to_offset(self);
3840 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3841 grammar.outline_config.as_ref().map(|c| &c.query)
3842 });
3843 let configs = matches
3844 .grammars()
3845 .iter()
3846 .map(|g| g.outline_config.as_ref().unwrap())
3847 .collect::<Vec<_>>();
3848
3849 while let Some(mat) = matches.peek() {
3850 let config = &configs[mat.grammar_index];
3851 let containing_item_node = maybe!({
3852 let item_node = mat.captures.iter().find_map(|cap| {
3853 if cap.index == config.item_capture_ix {
3854 Some(cap.node)
3855 } else {
3856 None
3857 }
3858 })?;
3859
3860 let item_byte_range = item_node.byte_range();
3861 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3862 None
3863 } else {
3864 Some(item_node)
3865 }
3866 });
3867
3868 if let Some(item_node) = containing_item_node {
3869 return Some(
3870 Point::from_ts_point(item_node.start_position())
3871 ..Point::from_ts_point(item_node.end_position()),
3872 );
3873 }
3874
3875 matches.advance();
3876 }
3877 None
3878 }
3879
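/// Returns the outline items whose ranges intersect the given range, ordered by start
/// position, with `depth` reflecting how deeply each item is nested.
///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
/// for item in &items {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```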
3880 pub fn outline_items_containing<T: ToOffset>(
3881 &self,
3882 range: Range<T>,
3883 include_extra_context: bool,
3884 theme: Option<&SyntaxTheme>,
3885 ) -> Vec<OutlineItem<Anchor>> {
3886 self.outline_items_containing_internal(
3887 range,
3888 include_extra_context,
3889 theme,
3890 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3891 )
3892 }
3893
3894 pub fn outline_items_as_points_containing<T: ToOffset>(
3895 &self,
3896 range: Range<T>,
3897 include_extra_context: bool,
3898 theme: Option<&SyntaxTheme>,
3899 ) -> Vec<OutlineItem<Point>> {
3900 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3901 range
3902 })
3903 }
3904
3905 fn outline_items_containing_internal<T: ToOffset, U>(
3906 &self,
3907 range: Range<T>,
3908 include_extra_context: bool,
3909 theme: Option<&SyntaxTheme>,
3910 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3911 ) -> Vec<OutlineItem<U>> {
3912 let range = range.to_offset(self);
3913 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3914 grammar.outline_config.as_ref().map(|c| &c.query)
3915 });
3916
3917 let mut items = Vec::new();
3918 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3919 while let Some(mat) = matches.peek() {
3920 let config = matches.grammars()[mat.grammar_index]
3921 .outline_config
3922 .as_ref()
3923 .unwrap();
3924 if let Some(item) =
3925 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3926 {
3927 items.push(item);
3928 } else if let Some(capture) = mat
3929 .captures
3930 .iter()
3931 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3932 {
3933 let capture_range = capture.node.start_position()..capture.node.end_position();
3934 let mut capture_row_range =
3935 capture_range.start.row as u32..capture_range.end.row as u32;
3936 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3937 {
3938 capture_row_range.end -= 1;
3939 }
3940 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3941 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3942 last_row_range.end = capture_row_range.end;
3943 } else {
3944 annotation_row_ranges.push(capture_row_range);
3945 }
3946 } else {
3947 annotation_row_ranges.push(capture_row_range);
3948 }
3949 }
3950 matches.advance();
3951 }
3952
3953 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3954
3955 // Assign depths based on containment relationships and convert to anchors.
3956 let mut item_ends_stack = Vec::<Point>::new();
3957 let mut anchor_items = Vec::new();
3958 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3959 for item in items {
3960 while let Some(last_end) = item_ends_stack.last().copied() {
3961 if last_end < item.range.end {
3962 item_ends_stack.pop();
3963 } else {
3964 break;
3965 }
3966 }
3967
3968 let mut annotation_row_range = None;
3969 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3970 let row_preceding_item = item.range.start.row.saturating_sub(1);
3971 if next_annotation_row_range.end < row_preceding_item {
3972 annotation_row_ranges.next();
3973 } else {
3974 if next_annotation_row_range.end == row_preceding_item {
3975 annotation_row_range = Some(next_annotation_row_range.clone());
3976 annotation_row_ranges.next();
3977 }
3978 break;
3979 }
3980 }
3981
3982 anchor_items.push(OutlineItem {
3983 depth: item_ends_stack.len(),
3984 range: range_callback(self, item.range.clone()),
3985 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3986 text: item.text,
3987 highlight_ranges: item.highlight_ranges,
3988 name_ranges: item.name_ranges,
3989 body_range: item.body_range.map(|r| range_callback(self, r)),
3990 annotation_range: annotation_row_range.map(|annotation_range| {
3991 let point_range = Point::new(annotation_range.start, 0)
3992 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
3993 range_callback(self, point_range)
3994 }),
3995 });
3996 item_ends_stack.push(item.range.end);
3997 }
3998
3999 anchor_items
4000 }
4001
4002 fn next_outline_item(
4003 &self,
4004 config: &OutlineConfig,
4005 mat: &SyntaxMapMatch,
4006 range: &Range<usize>,
4007 include_extra_context: bool,
4008 theme: Option<&SyntaxTheme>,
4009 ) -> Option<OutlineItem<Point>> {
4010 let item_node = mat.captures.iter().find_map(|cap| {
4011 if cap.index == config.item_capture_ix {
4012 Some(cap.node)
4013 } else {
4014 None
4015 }
4016 })?;
4017
4018 let item_byte_range = item_node.byte_range();
4019 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4020 return None;
4021 }
4022 let item_point_range = Point::from_ts_point(item_node.start_position())
4023 ..Point::from_ts_point(item_node.end_position());
4024
4025 let mut open_point = None;
4026 let mut close_point = None;
4027
4028 let mut buffer_ranges = Vec::new();
4029 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4030 let mut range = node.start_byte()..node.end_byte();
4031 let start = node.start_position();
4032 if node.end_position().row > start.row {
4033 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4034 }
4035
4036 if !range.is_empty() {
4037 buffer_ranges.push((range, node_is_name));
4038 }
4039 };
4040
4041 for capture in mat.captures {
4042 if capture.index == config.name_capture_ix {
4043 add_to_buffer_ranges(capture.node, true);
4044 } else if Some(capture.index) == config.context_capture_ix
4045 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4046 {
4047 add_to_buffer_ranges(capture.node, false);
4048 } else {
4049 if Some(capture.index) == config.open_capture_ix {
4050 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4051 } else if Some(capture.index) == config.close_capture_ix {
4052 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4053 }
4054 }
4055 }
4056
4057 if buffer_ranges.is_empty() {
4058 return None;
4059 }
4060 let source_range_for_text =
4061 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4062
4063 let mut text = String::new();
4064 let mut highlight_ranges = Vec::new();
4065 let mut name_ranges = Vec::new();
4066 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4067 let mut last_buffer_range_end = 0;
4068 for (buffer_range, is_name) in buffer_ranges {
4069 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4070 if space_added {
4071 text.push(' ');
4072 }
4073 let before_append_len = text.len();
4074 let mut offset = buffer_range.start;
4075 chunks.seek(buffer_range.clone());
4076 for mut chunk in chunks.by_ref() {
4077 if chunk.text.len() > buffer_range.end - offset {
4078 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4079 offset = buffer_range.end;
4080 } else {
4081 offset += chunk.text.len();
4082 }
4083 let style = chunk
4084 .syntax_highlight_id
4085 .zip(theme)
4086 .and_then(|(highlight, theme)| highlight.style(theme));
4087 if let Some(style) = style {
4088 let start = text.len();
4089 let end = start + chunk.text.len();
4090 highlight_ranges.push((start..end, style));
4091 }
4092 text.push_str(chunk.text);
4093 if offset >= buffer_range.end {
4094 break;
4095 }
4096 }
4097 if is_name {
4098 let after_append_len = text.len();
4099 let start = if space_added && !name_ranges.is_empty() {
4100 before_append_len - 1
4101 } else {
4102 before_append_len
4103 };
4104 name_ranges.push(start..after_append_len);
4105 }
4106 last_buffer_range_end = buffer_range.end;
4107 }
4108
4109 Some(OutlineItem {
4110 depth: 0, // We'll calculate the depth later
4111 range: item_point_range,
4112 source_range_for_text: source_range_for_text.to_point(self),
4113 text,
4114 highlight_ranges,
4115 name_ranges,
4116 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4117 annotation_range: None,
4118 })
4119 }
4120
4121 pub fn function_body_fold_ranges<T: ToOffset>(
4122 &self,
4123 within: Range<T>,
4124 ) -> impl Iterator<Item = Range<usize>> + '_ {
4125 self.text_object_ranges(within, TreeSitterOptions::default())
4126 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4127 }
4128
4129 /// For each grammar in the language, runs the provided
4130 /// [`tree_sitter::Query`] against the given range.
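///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`; mirrors how
/// this method is used internally for bracket queries):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.brackets_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here...
///     matches.advance();
/// }
/// ```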
4131 pub fn matches(
4132 &self,
4133 range: Range<usize>,
4134 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4135 ) -> SyntaxMapMatches<'_> {
4136 self.syntax.matches(range, self, query)
4137 }
4138
4139 pub fn all_bracket_ranges(
4140 &self,
4141 range: Range<usize>,
4142 ) -> impl Iterator<Item = BracketMatch> + '_ {
4143 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4144 grammar.brackets_config.as_ref().map(|c| &c.query)
4145 });
4146 let configs = matches
4147 .grammars()
4148 .iter()
4149 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4150 .collect::<Vec<_>>();
4151
4152 iter::from_fn(move || {
4153 while let Some(mat) = matches.peek() {
4154 let mut open = None;
4155 let mut close = None;
4156 let config = &configs[mat.grammar_index];
4157 let pattern = &config.patterns[mat.pattern_index];
4158 for capture in mat.captures {
4159 if capture.index == config.open_capture_ix {
4160 open = Some(capture.node.byte_range());
4161 } else if capture.index == config.close_capture_ix {
4162 close = Some(capture.node.byte_range());
4163 }
4164 }
4165
4166 matches.advance();
4167
4168 let Some((open_range, close_range)) = open.zip(close) else {
4169 continue;
4170 };
4171
4172 let bracket_range = open_range.start..=close_range.end;
4173 if !bracket_range.overlaps(&range) {
4174 continue;
4175 }
4176
4177 return Some(BracketMatch {
4178 open_range,
4179 close_range,
4180 newline_only: pattern.newline_only,
4181 });
4182 }
4183 None
4184 })
4185 }
4186
4187 /// Returns bracket range pairs overlapping or adjacent to `range`.
4188 pub fn bracket_ranges<T: ToOffset>(
4189 &self,
4190 range: Range<T>,
4191 ) -> impl Iterator<Item = BracketMatch> + '_ {
4192 // Find bracket pairs that *inclusively* contain the given range.
4193 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4194 self.all_bracket_ranges(range)
4195 .filter(|pair| !pair.newline_only)
4196 }
4197
4198 pub fn debug_variables_query<T: ToOffset>(
4199 &self,
4200 range: Range<T>,
4201 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4202 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4203
4204 let mut matches = self.syntax.matches_with_options(
4205 range.clone(),
4206 &self.text,
4207 TreeSitterOptions::default(),
4208 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4209 );
4210
4211 let configs = matches
4212 .grammars()
4213 .iter()
4214 .map(|grammar| grammar.debug_variables_config.as_ref())
4215 .collect::<Vec<_>>();
4216
4217 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4218
4219 iter::from_fn(move || {
4220 loop {
4221 while let Some(capture) = captures.pop() {
4222 if capture.0.overlaps(&range) {
4223 return Some(capture);
4224 }
4225 }
4226
4227 let mat = matches.peek()?;
4228
4229 let Some(config) = configs[mat.grammar_index].as_ref() else {
4230 matches.advance();
4231 continue;
4232 };
4233
4234 for capture in mat.captures {
4235 let Some(ix) = config
4236 .objects_by_capture_ix
4237 .binary_search_by_key(&capture.index, |e| e.0)
4238 .ok()
4239 else {
4240 continue;
4241 };
4242 let text_object = config.objects_by_capture_ix[ix].1;
4243 let byte_range = capture.node.byte_range();
4244
4245 let mut found = false;
4246 for (range, existing) in captures.iter_mut() {
4247 if existing == &text_object {
4248 range.start = range.start.min(byte_range.start);
4249 range.end = range.end.max(byte_range.end);
4250 found = true;
4251 break;
4252 }
4253 }
4254
4255 if !found {
4256 captures.push((byte_range, text_object));
4257 }
4258 }
4259
4260 matches.advance();
4261 }
4262 })
4263 }
4264
4265 pub fn text_object_ranges<T: ToOffset>(
4266 &self,
4267 range: Range<T>,
4268 options: TreeSitterOptions,
4269 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4270 let range =
4271 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4272
4273 let mut matches =
4274 self.syntax
4275 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4276 grammar.text_object_config.as_ref().map(|c| &c.query)
4277 });
4278
4279 let configs = matches
4280 .grammars()
4281 .iter()
4282 .map(|grammar| grammar.text_object_config.as_ref())
4283 .collect::<Vec<_>>();
4284
4285 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4286
4287 iter::from_fn(move || {
4288 loop {
4289 while let Some(capture) = captures.pop() {
4290 if capture.0.overlaps(&range) {
4291 return Some(capture);
4292 }
4293 }
4294
4295 let mat = matches.peek()?;
4296
4297 let Some(config) = configs[mat.grammar_index].as_ref() else {
4298 matches.advance();
4299 continue;
4300 };
4301
4302 for capture in mat.captures {
4303 let Some(ix) = config
4304 .text_objects_by_capture_ix
4305 .binary_search_by_key(&capture.index, |e| e.0)
4306 .ok()
4307 else {
4308 continue;
4309 };
4310 let text_object = config.text_objects_by_capture_ix[ix].1;
4311 let byte_range = capture.node.byte_range();
4312
4313 let mut found = false;
4314 for (range, existing) in captures.iter_mut() {
4315 if existing == &text_object {
4316 range.start = range.start.min(byte_range.start);
4317 range.end = range.end.max(byte_range.end);
4318 found = true;
4319 break;
4320 }
4321 }
4322
4323 if !found {
4324 captures.push((byte_range, text_object));
4325 }
4326 }
4327
4328 matches.advance();
4329 }
4330 })
4331 }
4332
4333 /// Returns enclosing bracket ranges containing the given range.
4334 pub fn enclosing_bracket_ranges<T: ToOffset>(
4335 &self,
4336 range: Range<T>,
4337 ) -> impl Iterator<Item = BracketMatch> + '_ {
4338 let range = range.start.to_offset(self)..range.end.to_offset(self);
4339
4340 self.bracket_ranges(range.clone()).filter(move |pair| {
4341 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4342 })
4343 }
4344
4345 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4346 ///
4347 /// Optionally accepts a `range_filter` callback to restrict which bracket ranges are considered.
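///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot` and a cursor
/// `selection: Range<usize>`):
///
/// ```ignore
/// // Find the tightest pair of brackets around the selection, skipping empty pairs.
/// let innermost = snapshot.innermost_enclosing_bracket_ranges(
///     selection,
///     Some(&|open: Range<usize>, close: Range<usize>| close.start > open.end),
/// );
/// if let Some((open, close)) = innermost {
///     // `open` and `close` are the byte ranges of the opening and closing brackets.
/// }
/// ```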
4348 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4349 &self,
4350 range: Range<T>,
4351 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4352 ) -> Option<(Range<usize>, Range<usize>)> {
4353 let range = range.start.to_offset(self)..range.end.to_offset(self);
4354
4355 // Get the ranges of the innermost pair of brackets.
4356 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4357
4358 for pair in self.enclosing_bracket_ranges(range) {
4359 if let Some(range_filter) = range_filter
4360 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4361 {
4362 continue;
4363 }
4364
4365 let len = pair.close_range.end - pair.open_range.start;
4366
4367 if let Some((existing_open, existing_close)) = &result {
4368 let existing_len = existing_close.end - existing_open.start;
4369 if len > existing_len {
4370 continue;
4371 }
4372 }
4373
4374 result = Some((pair.open_range, pair.close_range));
4375 }
4376
4377 result
4378 }
4379
4380 /// Returns anchor ranges for any matches of the redaction query.
4381 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4382 /// will be run on the relevant section of the buffer.
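///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`), e.g. for
/// masking secrets before displaying or sharing buffer text:
///
/// ```ignore
/// let redacted: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
/// ```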
4383 pub fn redacted_ranges<T: ToOffset>(
4384 &self,
4385 range: Range<T>,
4386 ) -> impl Iterator<Item = Range<usize>> + '_ {
4387 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4388 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4389 grammar
4390 .redactions_config
4391 .as_ref()
4392 .map(|config| &config.query)
4393 });
4394
4395 let configs = syntax_matches
4396 .grammars()
4397 .iter()
4398 .map(|grammar| grammar.redactions_config.as_ref())
4399 .collect::<Vec<_>>();
4400
4401 iter::from_fn(move || {
4402 let redacted_range = syntax_matches
4403 .peek()
4404 .and_then(|mat| {
4405 configs[mat.grammar_index].and_then(|config| {
4406 mat.captures
4407 .iter()
4408 .find(|capture| capture.index == config.redaction_capture_ix)
4409 })
4410 })
4411 .map(|mat| mat.node.byte_range());
4412 syntax_matches.advance();
4413 redacted_range
4414 })
4415 }
4416
4417 pub fn injections_intersecting_range<T: ToOffset>(
4418 &self,
4419 range: Range<T>,
4420 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4421 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4422
4423 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4424 grammar
4425 .injection_config
4426 .as_ref()
4427 .map(|config| &config.query)
4428 });
4429
4430 let configs = syntax_matches
4431 .grammars()
4432 .iter()
4433 .map(|grammar| grammar.injection_config.as_ref())
4434 .collect::<Vec<_>>();
4435
4436 iter::from_fn(move || {
4437 let ranges = syntax_matches.peek().and_then(|mat| {
4438 let config = &configs[mat.grammar_index]?;
4439 let content_capture_range = mat.captures.iter().find_map(|capture| {
4440 if capture.index == config.content_capture_ix {
4441 Some(capture.node.byte_range())
4442 } else {
4443 None
4444 }
4445 })?;
4446 let language = self.language_at(content_capture_range.start)?;
4447 Some((content_capture_range, language))
4448 });
4449 syntax_matches.advance();
4450 ranges
4451 })
4452 }
4453
4454 pub fn runnable_ranges(
4455 &self,
4456 offset_range: Range<usize>,
4457 ) -> impl Iterator<Item = RunnableRange> + '_ {
4458 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4459 grammar.runnable_config.as_ref().map(|config| &config.query)
4460 });
4461
4462 let test_configs = syntax_matches
4463 .grammars()
4464 .iter()
4465 .map(|grammar| grammar.runnable_config.as_ref())
4466 .collect::<Vec<_>>();
4467
4468 iter::from_fn(move || {
4469 loop {
4470 let mat = syntax_matches.peek()?;
4471
4472 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4473 let mut run_range = None;
4474 let full_range = mat.captures.iter().fold(
4475 Range {
4476 start: usize::MAX,
4477 end: 0,
4478 },
4479 |mut acc, next| {
4480 let byte_range = next.node.byte_range();
4481 if acc.start > byte_range.start {
4482 acc.start = byte_range.start;
4483 }
4484 if acc.end < byte_range.end {
4485 acc.end = byte_range.end;
4486 }
4487 acc
4488 },
4489 );
4490 if full_range.start > full_range.end {
4491 // We did not find a full spanning range of this match.
4492 return None;
4493 }
4494 let extra_captures: SmallVec<[_; 1]> =
4495 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4496 test_configs
4497 .extra_captures
4498 .get(capture.index as usize)
4499 .cloned()
4500 .and_then(|tag_name| match tag_name {
4501 RunnableCapture::Named(name) => {
4502 Some((capture.node.byte_range(), name))
4503 }
4504 RunnableCapture::Run => {
4505 let _ = run_range.insert(capture.node.byte_range());
4506 None
4507 }
4508 })
4509 }));
4510 let run_range = run_range?;
4511 let tags = test_configs
4512 .query
4513 .property_settings(mat.pattern_index)
4514 .iter()
4515 .filter_map(|property| {
4516 if *property.key == *"tag" {
4517 property
4518 .value
4519 .as_ref()
4520 .map(|value| RunnableTag(value.to_string().into()))
4521 } else {
4522 None
4523 }
4524 })
4525 .collect();
4526 let extra_captures = extra_captures
4527 .into_iter()
4528 .map(|(range, name)| {
4529 (
4530 name.to_string(),
4531 self.text_for_range(range).collect::<String>(),
4532 )
4533 })
4534 .collect();
4535 // All tags should have the same range.
4536 Some(RunnableRange {
4537 run_range,
4538 full_range,
4539 runnable: Runnable {
4540 tags,
4541 language: mat.language,
4542 buffer: self.remote_id(),
4543 },
4544 extra_captures,
4545 buffer_id: self.remote_id(),
4546 })
4547 });
4548
4549 syntax_matches.advance();
4550 if test_range.is_some() {
4551 // It's fine to short-circuit when .peek()? returns None. We only want to stop this iterator once the
4552 // matches are exhausted; a match without a run marker just loops around to try the next match.
4553 return test_range;
4554 }
4555 }
4556 })
4557 }
4558
4559 /// Returns selections for remote peers intersecting the given range.
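///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// for (replica_id, _line_mode, _cursor_shape, selections) in
///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
/// {
///     for selection in selections {
///         // Each `selection` is a `Selection<Anchor>` belonging to `replica_id`.
///     }
/// }
/// ```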
4560 #[allow(clippy::type_complexity)]
4561 pub fn selections_in_range(
4562 &self,
4563 range: Range<Anchor>,
4564 include_local: bool,
4565 ) -> impl Iterator<
4566 Item = (
4567 ReplicaId,
4568 bool,
4569 CursorShape,
4570 impl Iterator<Item = &Selection<Anchor>> + '_,
4571 ),
4572 > + '_ {
4573 self.remote_selections
4574 .iter()
4575 .filter(move |(replica_id, set)| {
4576 (include_local || **replica_id != self.text.replica_id())
4577 && !set.selections.is_empty()
4578 })
4579 .map(move |(replica_id, set)| {
4580 let start_ix = match set.selections.binary_search_by(|probe| {
4581 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4582 }) {
4583 Ok(ix) | Err(ix) => ix,
4584 };
4585 let end_ix = match set.selections.binary_search_by(|probe| {
4586 probe.start.cmp(&range.end, self).then(Ordering::Less)
4587 }) {
4588 Ok(ix) | Err(ix) => ix,
4589 };
4590
4591 (
4592 *replica_id,
4593 set.line_mode,
4594 set.cursor_shape,
4595 set.selections[start_ix..end_ix].iter(),
4596 )
4597 })
4598 }
4599
4600 /// Returns whether the buffer contains any diagnostics.
4601 pub fn has_diagnostics(&self) -> bool {
4602 !self.diagnostics.is_empty()
4603 }
4604
4605 /// Returns all the diagnostics intersecting the given range.
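///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`); the position
/// type of the returned ranges is chosen via the `O` type parameter:
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
/// }
/// ```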
4606 pub fn diagnostics_in_range<'a, T, O>(
4607 &'a self,
4608 search_range: Range<T>,
4609 reversed: bool,
4610 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4611 where
4612 T: 'a + Clone + ToOffset,
4613 O: 'a + FromAnchor,
4614 {
4615 let mut iterators: Vec<_> = self
4616 .diagnostics
4617 .iter()
4618 .map(|(_, collection)| {
4619 collection
4620 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4621 .peekable()
4622 })
4623 .collect();
4624
4625 std::iter::from_fn(move || {
4626 let (next_ix, _) = iterators
4627 .iter_mut()
4628 .enumerate()
4629 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4630 .min_by(|(_, a), (_, b)| {
4631 let cmp = a
4632 .range
4633 .start
4634 .cmp(&b.range.start, self)
4635 // when range is equal, sort by diagnostic severity
4636 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4637 // and stabilize order with group_id
4638 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4639 if reversed { cmp.reverse() } else { cmp }
4640 })?;
4641 iterators[next_ix]
4642 .next()
4643 .map(
4644 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4645 diagnostic,
4646 range: FromAnchor::from_anchor(&range.start, self)
4647 ..FromAnchor::from_anchor(&range.end, self),
4648 },
4649 )
4650 })
4651 }
4652
4653 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4654 /// should be used instead.
4655 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4656 &self.diagnostics
4657 }
4658
4659 /// Returns all the diagnostic groups associated with the given
4660 /// language server ID. If no language server ID is provided,
4661 /// all diagnostic groups are returned.
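///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`):
///
/// ```ignore
/// // Collect groups from every language server, sorted by the primary entry's position.
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     let primary = &group.entries[group.primary_ix];
///     println!("{server_id:?}: {}", primary.diagnostic.message);
/// }
/// ```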
4662 pub fn diagnostic_groups(
4663 &self,
4664 language_server_id: Option<LanguageServerId>,
4665 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4666 let mut groups = Vec::new();
4667
4668 if let Some(language_server_id) = language_server_id {
4669 if let Ok(ix) = self
4670 .diagnostics
4671 .binary_search_by_key(&language_server_id, |e| e.0)
4672 {
4673 self.diagnostics[ix]
4674 .1
4675 .groups(language_server_id, &mut groups, self);
4676 }
4677 } else {
4678 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4679 diagnostics.groups(*language_server_id, &mut groups, self);
4680 }
4681 }
4682
4683 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4684 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4685 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4686 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4687 });
4688
4689 groups
4690 }
4691
4692 /// Returns an iterator over the diagnostics for the given group.
4693 pub fn diagnostic_group<O>(
4694 &self,
4695 group_id: usize,
4696 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4697 where
4698 O: FromAnchor + 'static,
4699 {
4700 self.diagnostics
4701 .iter()
4702 .flat_map(move |(_, set)| set.group(group_id, self))
4703 }
4704
4705 /// An integer version number that accounts for all updates besides
4706 /// the buffer's text itself (which is versioned via a version vector).
4707 pub fn non_text_state_update_count(&self) -> usize {
4708 self.non_text_state_update_count
4709 }
4710
4711 /// An integer version that changes when the buffer's syntax changes.
4712 pub fn syntax_update_count(&self) -> usize {
4713 self.syntax.update_count()
4714 }
4715
4716 /// Returns a snapshot of the underlying file.
4717 pub fn file(&self) -> Option<&Arc<dyn File>> {
4718 self.file.as_ref()
4719 }
4720
4721 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4722 if let Some(file) = self.file() {
4723 if file.path().file_name().is_none() || include_root {
4724 Some(file.full_path(cx).to_string_lossy().into_owned())
4725 } else {
4726 Some(file.path().display(file.path_style(cx)).to_string())
4727 }
4728 } else {
4729 None
4730 }
4731 }
4732
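/// Collects the distinct words in the given range, keyed by their text and mapped to their
/// anchor ranges, optionally filtering them against a fuzzy query string.
///
/// Illustrative usage (not compiled; assumes a `snapshot: BufferSnapshot`), e.g. for
/// gathering word-based completion candidates:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// // Each key contains the characters 'c', 'f', 'g' in order (case-insensitively).
/// let candidates: Vec<&String> = words.keys().collect();
/// ```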
4733 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4734 let query_str = query.fuzzy_contents;
4735 if query_str.is_some_and(|query| query.is_empty()) {
4736 return BTreeMap::default();
4737 }
4738
4739 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4740 language,
4741 override_id: None,
4742 }));
4743
4744 let mut query_ix = 0;
4745 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4746 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4747
4748 let mut words = BTreeMap::default();
4749 let mut current_word_start_ix = None;
4750 let mut chunk_ix = query.range.start;
4751 for chunk in self.chunks(query.range, false) {
4752 for (i, c) in chunk.text.char_indices() {
4753 let ix = chunk_ix + i;
4754 if classifier.is_word(c) {
4755 if current_word_start_ix.is_none() {
4756 current_word_start_ix = Some(ix);
4757 }
4758
4759 if let Some(query_chars) = &query_chars
4760 && query_ix < query_len
4761 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4762 {
4763 query_ix += 1;
4764 }
4765 continue;
4766 } else if let Some(word_start) = current_word_start_ix.take()
4767 && query_ix == query_len
4768 {
4769 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4770 let mut word_text = self.text_for_range(word_start..ix).peekable();
4771 let first_char = word_text
4772 .peek()
4773 .and_then(|first_chunk| first_chunk.chars().next());
4774 // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
4775 if !query.skip_digits
4776 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4777 {
4778 words.insert(word_text.collect(), word_range);
4779 }
4780 }
4781 query_ix = 0;
4782 }
4783 chunk_ix += chunk.text.len();
4784 }
4785
4786 words
4787 }
4788}
4789
4790pub struct WordsQuery<'a> {
4791 /// Only return words that contain every character of this fuzzy string.
4792 pub fuzzy_contents: Option<&'a str>,
4793 /// Skips words that start with a digit.
4794 pub skip_digits: bool,
4795 /// Buffer offset range, to look for words.
4796 pub range: Range<usize>,
4797}
4798
4799fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4800 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4801}
4802
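/// Measures the leading indentation of a character stream: the run of leading spaces or
/// tabs, with the kind taken from the first indentation character.
///
/// Illustrative behavior (not compiled):
///
/// ```ignore
/// assert_eq!(indent_size_for_text("    let x = 1;".chars()).len, 4);
/// assert_eq!(indent_size_for_text("\tfn main() {}".chars()).kind, IndentKind::Tab);
/// ```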
4803fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4804 let mut result = IndentSize::spaces(0);
4805 for c in text {
4806 let kind = match c {
4807 ' ' => IndentKind::Space,
4808 '\t' => IndentKind::Tab,
4809 _ => break,
4810 };
4811 if result.len == 0 {
4812 result.kind = kind;
4813 }
4814 result.len += 1;
4815 }
4816 result
4817}
4818
4819impl Clone for BufferSnapshot {
4820 fn clone(&self) -> Self {
4821 Self {
4822 text: self.text.clone(),
4823 syntax: self.syntax.clone(),
4824 file: self.file.clone(),
4825 remote_selections: self.remote_selections.clone(),
4826 diagnostics: self.diagnostics.clone(),
4827 language: self.language.clone(),
4828 non_text_state_update_count: self.non_text_state_update_count,
4829 }
4830 }
4831}
4832
4833impl Deref for BufferSnapshot {
4834 type Target = text::BufferSnapshot;
4835
4836 fn deref(&self) -> &Self::Target {
4837 &self.text
4838 }
4839}
4840
4841unsafe impl Send for BufferChunks<'_> {}
4842
4843impl<'a> BufferChunks<'a> {
4844 pub(crate) fn new(
4845 text: &'a Rope,
4846 range: Range<usize>,
4847 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4848 diagnostics: bool,
4849 buffer_snapshot: Option<&'a BufferSnapshot>,
4850 ) -> Self {
4851 let mut highlights = None;
4852 if let Some((captures, highlight_maps)) = syntax {
4853 highlights = Some(BufferChunkHighlights {
4854 captures,
4855 next_capture: None,
4856 stack: Default::default(),
4857 highlight_maps,
4858 })
4859 }
4860
4861 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4862 let chunks = text.chunks_in_range(range.clone());
4863
4864 let mut this = BufferChunks {
4865 range,
4866 buffer_snapshot,
4867 chunks,
4868 diagnostic_endpoints,
4869 error_depth: 0,
4870 warning_depth: 0,
4871 information_depth: 0,
4872 hint_depth: 0,
4873 unnecessary_depth: 0,
4874 underline: true,
4875 highlights,
4876 };
4877 this.initialize_diagnostic_endpoints();
4878 this
4879 }
4880
4881 /// Seeks to the given byte range in the buffer.
4882 pub fn seek(&mut self, range: Range<usize>) {
4883 let old_range = std::mem::replace(&mut self.range, range.clone());
4884 self.chunks.set_range(self.range.clone());
4885 if let Some(highlights) = self.highlights.as_mut() {
4886 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4887 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4888 highlights
4889 .stack
4890 .retain(|(end_offset, _)| *end_offset > range.start);
4891 if let Some(capture) = &highlights.next_capture
4892 && range.start >= capture.node.start_byte()
4893 {
4894 let next_capture_end = capture.node.end_byte();
4895 if range.start < next_capture_end {
4896 highlights.stack.push((
4897 next_capture_end,
4898 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4899 ));
4900 }
4901 highlights.next_capture.take();
4902 }
4903 } else if let Some(snapshot) = self.buffer_snapshot {
4904 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4905 *highlights = BufferChunkHighlights {
4906 captures,
4907 next_capture: None,
4908 stack: Default::default(),
4909 highlight_maps,
4910 };
4911 } else {
4912 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4913 // Seeking such BufferChunks is not supported.
4914 debug_assert!(
4915 false,
4916 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4917 );
4918 }
4919
4920 highlights.captures.set_byte_range(self.range.clone());
4921 self.initialize_diagnostic_endpoints();
4922 }
4923 }
4924
4925 fn initialize_diagnostic_endpoints(&mut self) {
4926 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4927 && let Some(buffer) = self.buffer_snapshot
4928 {
4929 let mut diagnostic_endpoints = Vec::new();
4930 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4931 diagnostic_endpoints.push(DiagnosticEndpoint {
4932 offset: entry.range.start,
4933 is_start: true,
4934 severity: entry.diagnostic.severity,
4935 is_unnecessary: entry.diagnostic.is_unnecessary,
4936 underline: entry.diagnostic.underline,
4937 });
4938 diagnostic_endpoints.push(DiagnosticEndpoint {
4939 offset: entry.range.end,
4940 is_start: false,
4941 severity: entry.diagnostic.severity,
4942 is_unnecessary: entry.diagnostic.is_unnecessary,
4943 underline: entry.diagnostic.underline,
4944 });
4945 }
4946 diagnostic_endpoints
4947 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4948 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4949 self.hint_depth = 0;
4950 self.error_depth = 0;
4951 self.warning_depth = 0;
4952 self.information_depth = 0;
4953 }
4954 }
4955
4956 /// The current byte offset in the buffer.
4957 pub fn offset(&self) -> usize {
4958 self.range.start
4959 }
4960
4961 pub fn range(&self) -> Range<usize> {
4962 self.range.clone()
4963 }
4964
4965 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4966 let depth = match endpoint.severity {
4967 DiagnosticSeverity::ERROR => &mut self.error_depth,
4968 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4969 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4970 DiagnosticSeverity::HINT => &mut self.hint_depth,
4971 _ => return,
4972 };
4973 if endpoint.is_start {
4974 *depth += 1;
4975 } else {
4976 *depth -= 1;
4977 }
4978
4979 if endpoint.is_unnecessary {
4980 if endpoint.is_start {
4981 self.unnecessary_depth += 1;
4982 } else {
4983 self.unnecessary_depth -= 1;
4984 }
4985 }
4986 }
4987
4988 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4989 if self.error_depth > 0 {
4990 Some(DiagnosticSeverity::ERROR)
4991 } else if self.warning_depth > 0 {
4992 Some(DiagnosticSeverity::WARNING)
4993 } else if self.information_depth > 0 {
4994 Some(DiagnosticSeverity::INFORMATION)
4995 } else if self.hint_depth > 0 {
4996 Some(DiagnosticSeverity::HINT)
4997 } else {
4998 None
4999 }
5000 }
5001
5002 fn current_code_is_unnecessary(&self) -> bool {
5003 self.unnecessary_depth > 0
5004 }
5005}
5006
5007impl<'a> Iterator for BufferChunks<'a> {
5008 type Item = Chunk<'a>;
5009
5010 fn next(&mut self) -> Option<Self::Item> {
5011 let mut next_capture_start = usize::MAX;
5012 let mut next_diagnostic_endpoint = usize::MAX;
5013
5014 if let Some(highlights) = self.highlights.as_mut() {
5015 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5016 if *parent_capture_end <= self.range.start {
5017 highlights.stack.pop();
5018 } else {
5019 break;
5020 }
5021 }
5022
5023 if highlights.next_capture.is_none() {
5024 highlights.next_capture = highlights.captures.next();
5025 }
5026
5027 while let Some(capture) = highlights.next_capture.as_ref() {
5028 if self.range.start < capture.node.start_byte() {
5029 next_capture_start = capture.node.start_byte();
5030 break;
5031 } else {
5032 let highlight_id =
5033 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5034 highlights
5035 .stack
5036 .push((capture.node.end_byte(), highlight_id));
5037 highlights.next_capture = highlights.captures.next();
5038 }
5039 }
5040 }
5041
5042 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5043 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5044 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5045 if endpoint.offset <= self.range.start {
5046 self.update_diagnostic_depths(endpoint);
5047 diagnostic_endpoints.next();
5048 self.underline = endpoint.underline;
5049 } else {
5050 next_diagnostic_endpoint = endpoint.offset;
5051 break;
5052 }
5053 }
5054 }
5055 self.diagnostic_endpoints = diagnostic_endpoints;
5056
5057 if let Some(ChunkBitmaps {
5058 text: chunk,
5059 chars: chars_map,
5060 tabs,
5061 }) = self.chunks.peek_with_bitmaps()
5062 {
5063 let chunk_start = self.range.start;
5064 let mut chunk_end = (self.chunks.offset() + chunk.len())
5065 .min(next_capture_start)
5066 .min(next_diagnostic_endpoint);
5067 let mut highlight_id = None;
5068 if let Some(highlights) = self.highlights.as_ref()
5069 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5070 {
5071 chunk_end = chunk_end.min(*parent_capture_end);
5072 highlight_id = Some(*parent_highlight_id);
5073 }
5074 let bit_start = chunk_start - self.chunks.offset();
5075 let bit_end = chunk_end - self.chunks.offset();
5076
5077 let slice = &chunk[bit_start..bit_end];
5078
5079 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5080 let tabs = (tabs >> bit_start) & mask;
5081 let chars = (chars_map >> bit_start) & mask;
5082
5083 self.range.start = chunk_end;
5084 if self.range.start == self.chunks.offset() + chunk.len() {
5085 self.chunks.next().unwrap();
5086 }
5087
5088 Some(Chunk {
5089 text: slice,
5090 syntax_highlight_id: highlight_id,
5091 underline: self.underline,
5092 diagnostic_severity: self.current_diagnostic_severity(),
5093 is_unnecessary: self.current_code_is_unnecessary(),
5094 tabs,
5095 chars,
5096 ..Chunk::default()
5097 })
5098 } else {
5099 None
5100 }
5101 }
5102}
5103
5104impl operation_queue::Operation for Operation {
5105 fn lamport_timestamp(&self) -> clock::Lamport {
5106 match self {
5107 Operation::Buffer(_) => {
5108 unreachable!("buffer operations should never be deferred at this layer")
5109 }
5110 Operation::UpdateDiagnostics {
5111 lamport_timestamp, ..
5112 }
5113 | Operation::UpdateSelections {
5114 lamport_timestamp, ..
5115 }
5116 | Operation::UpdateCompletionTriggers {
5117 lamport_timestamp, ..
5118 }
5119 | Operation::UpdateLineEnding {
5120 lamport_timestamp, ..
5121 } => *lamport_timestamp,
5122 }
5123 }
5124}
5125
5126impl Default for Diagnostic {
5127 fn default() -> Self {
5128 Self {
5129 source: Default::default(),
5130 source_kind: DiagnosticSourceKind::Other,
5131 code: None,
5132 code_description: None,
5133 severity: DiagnosticSeverity::ERROR,
5134 message: Default::default(),
5135 markdown: None,
5136 group_id: 0,
5137 is_primary: false,
5138 is_disk_based: false,
5139 is_unnecessary: false,
5140 underline: true,
5141 data: None,
5142 }
5143 }
5144}
5145
5146impl IndentSize {
5147 /// Returns an [`IndentSize`] representing the given number of spaces.
5148 pub fn spaces(len: u32) -> Self {
5149 Self {
5150 len,
5151 kind: IndentKind::Space,
5152 }
5153 }
5154
5155 /// Returns an [`IndentSize`] representing a tab.
5156 pub fn tab() -> Self {
5157 Self {
5158 len: 1,
5159 kind: IndentKind::Tab,
5160 }
5161 }
5162
5163 /// An iterator over the characters represented by this [`IndentSize`].
5164 pub fn chars(&self) -> impl Iterator<Item = char> {
5165 iter::repeat(self.char()).take(self.len as usize)
5166 }
5167
5168 /// The character representation of this [`IndentSize`].
5169 pub fn char(&self) -> char {
5170 match self.kind {
5171 IndentKind::Space => ' ',
5172 IndentKind::Tab => '\t',
5173 }
5174 }
5175
5176 /// Consumes the current [`IndentSize`] and returns a new one that has
5177 /// been shrunk or enlarged by the given size along the given direction.
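///
/// Illustrative behavior (not compiled):
///
/// ```ignore
/// let indent = IndentSize::spaces(4);
/// let grown = indent.with_delta(Ordering::Greater, IndentSize::spaces(4)); // 8 spaces
/// let same = grown.with_delta(Ordering::Less, IndentSize::tab()); // unchanged: kinds differ
/// ```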
5178 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5179 match direction {
5180 Ordering::Less => {
5181 if self.kind == size.kind && self.len >= size.len {
5182 self.len -= size.len;
5183 }
5184 }
5185 Ordering::Equal => {}
5186 Ordering::Greater => {
5187 if self.len == 0 {
5188 self = size;
5189 } else if self.kind == size.kind {
5190 self.len += size.len;
5191 }
5192 }
5193 }
5194 self
5195 }
5196
5197 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5198 match self.kind {
5199 IndentKind::Space => self.len as usize,
5200 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5201 }
5202 }
5203}
5204
5205#[cfg(any(test, feature = "test-support"))]
5206pub struct TestFile {
5207 pub path: Arc<RelPath>,
5208 pub root_name: String,
5209 pub local_root: Option<PathBuf>,
5210}
5211
5212#[cfg(any(test, feature = "test-support"))]
5213impl File for TestFile {
5214 fn path(&self) -> &Arc<RelPath> {
5215 &self.path
5216 }
5217
5218 fn full_path(&self, _: &gpui::App) -> PathBuf {
5219 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5220 }
5221
5222 fn as_local(&self) -> Option<&dyn LocalFile> {
5223 if self.local_root.is_some() {
5224 Some(self)
5225 } else {
5226 None
5227 }
5228 }
5229
5230 fn disk_state(&self) -> DiskState {
5231 unimplemented!()
5232 }
5233
5234 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5235 self.path().file_name().unwrap_or(self.root_name.as_ref())
5236 }
5237
5238 fn worktree_id(&self, _: &App) -> WorktreeId {
5239 WorktreeId::from_usize(0)
5240 }
5241
5242 fn to_proto(&self, _: &App) -> rpc::proto::File {
5243 unimplemented!()
5244 }
5245
5246 fn is_private(&self) -> bool {
5247 false
5248 }
5249
5250 fn path_style(&self, _cx: &App) -> PathStyle {
5251 PathStyle::local()
5252 }
5253}
5254
5255#[cfg(any(test, feature = "test-support"))]
5256impl LocalFile for TestFile {
5257 fn abs_path(&self, _cx: &App) -> PathBuf {
5258 PathBuf::from(self.local_root.as_ref().unwrap())
5259 .join(&self.root_name)
5260 .join(self.path.as_std_path())
5261 }
5262
5263 fn load(
5264 &self,
5265 _cx: &App,
5266 _options: &EncodingOptions,
5267 _buffer_encoding: Option<Arc<Encoding>>,
5268 ) -> Task<Result<String>> {
5269 unimplemented!()
5270 }
5271
5272 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5273 unimplemented!()
5274 }
5275}
5276
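/// Coalesces an ascending sequence of row numbers into contiguous ranges, splitting any run
/// that would exceed `max_len` rows.
///
/// Illustrative behavior (not compiled):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```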
5277pub(crate) fn contiguous_ranges(
5278 values: impl Iterator<Item = u32>,
5279 max_len: usize,
5280) -> impl Iterator<Item = Range<u32>> {
5281 let mut values = values;
5282 let mut current_range: Option<Range<u32>> = None;
5283 std::iter::from_fn(move || {
5284 loop {
5285 if let Some(value) = values.next() {
5286 if let Some(range) = &mut current_range
5287 && value == range.end
5288 && range.len() < max_len
5289 {
5290 range.end += 1;
5291 continue;
5292 }
5293
5294 let prev_range = current_range.clone();
5295 current_range = Some(value..(value + 1));
5296 if prev_range.is_some() {
5297 return prev_range;
5298 }
5299 } else {
5300 return current_range.take();
5301 }
5302 }
5303 })
5304}
5305
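/// Classifies characters as word, whitespace, or punctuation characters, taking the
/// word-character overrides of an optional [`LanguageScope`] into account.
///
/// Illustrative behavior with no language scope (not compiled):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// assert_eq!(classifier.ignore_punctuation(true).kind('-'), CharKind::Word);
/// ```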
5306#[derive(Default, Debug)]
5307pub struct CharClassifier {
5308 scope: Option<LanguageScope>,
5309 scope_context: Option<CharScopeContext>,
5310 ignore_punctuation: bool,
5311}
5312
5313impl CharClassifier {
5314 pub fn new(scope: Option<LanguageScope>) -> Self {
5315 Self {
5316 scope,
5317 scope_context: None,
5318 ignore_punctuation: false,
5319 }
5320 }
5321
5322 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5323 Self {
5324 scope_context,
5325 ..self
5326 }
5327 }
5328
5329 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5330 Self {
5331 ignore_punctuation,
5332 ..self
5333 }
5334 }
5335
5336 pub fn is_whitespace(&self, c: char) -> bool {
5337 self.kind(c) == CharKind::Whitespace
5338 }
5339
5340 pub fn is_word(&self, c: char) -> bool {
5341 self.kind(c) == CharKind::Word
5342 }
5343
5344 pub fn is_punctuation(&self, c: char) -> bool {
5345 self.kind(c) == CharKind::Punctuation
5346 }
5347
5348 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5349 if c.is_alphanumeric() || c == '_' {
5350 return CharKind::Word;
5351 }
5352
5353 if let Some(scope) = &self.scope {
5354 let characters = match self.scope_context {
5355 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5356 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5357 None => scope.word_characters(),
5358 };
5359 if let Some(characters) = characters
5360 && characters.contains(&c)
5361 {
5362 return CharKind::Word;
5363 }
5364 }
5365
5366 if c.is_whitespace() {
5367 return CharKind::Whitespace;
5368 }
5369
5370 if ignore_punctuation {
5371 CharKind::Word
5372 } else {
5373 CharKind::Punctuation
5374 }
5375 }
5376
5377 pub fn kind(&self, c: char) -> CharKind {
5378 self.kind_with(c, self.ignore_punctuation)
5379 }
5380}
5381
5382/// Find all of the ranges of whitespace that occur at the ends of lines
5383/// in the given rope.
5384///
5385/// This could also be done with a regex search, but this implementation
5386/// avoids copying text.
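///
/// Illustrative behavior (not compiled; assumes `Rope: From<&str>`, as used elsewhere in
/// this crate):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}");
/// // One range for the two trailing spaces on line 0, one for the trailing tab on line 1.
/// assert_eq!(trailing_whitespace_ranges(&rope).len(), 2);
/// ```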
5387pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5388 let mut ranges = Vec::new();
5389
5390 let mut offset = 0;
5391 let mut prev_chunk_trailing_whitespace_range = 0..0;
5392 for chunk in rope.chunks() {
5393 let mut prev_line_trailing_whitespace_range = 0..0;
5394 for (i, line) in chunk.split('\n').enumerate() {
5395 let line_end_offset = offset + line.len();
5396 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5397 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5398
5399 if i == 0 && trimmed_line_len == 0 {
5400 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5401 }
5402 if !prev_line_trailing_whitespace_range.is_empty() {
5403 ranges.push(prev_line_trailing_whitespace_range);
5404 }
5405
5406 offset = line_end_offset + 1;
5407 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5408 }
5409
5410 offset -= 1;
5411 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5412 }
5413
5414 if !prev_chunk_trailing_whitespace_range.is_empty() {
5415 ranges.push(prev_chunk_trailing_whitespace_range);
5416 }
5417
5418 ranges
5419}