1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16};
17pub use crate::{
18 Grammar, Language, LanguageRegistry,
19 diagnostic_set::DiagnosticSet,
20 highlight_map::{HighlightId, HighlightMap},
21 proto,
22};
23use anyhow::{Context as _, Result};
24use clock::Lamport;
25pub use clock::ReplicaId;
26use collections::HashMap;
27use fs::MTime;
28use futures::channel::oneshot;
29use gpui::{
30 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
31 Task, TaskLabel, TextStyle,
32};
33
34use lsp::{LanguageServerId, NumberOrString};
35use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use settings::WorktreeId;
39use smallvec::SmallVec;
40use smol::future::yield_now;
41use std::{
42 any::Any,
43 borrow::Cow,
44 cell::Cell,
45 cmp::{self, Ordering, Reverse},
46 collections::{BTreeMap, BTreeSet},
47 future::Future,
48 iter::{self, Iterator, Peekable},
49 mem,
50 num::NonZeroU32,
51 ops::{Deref, Range},
52 path::PathBuf,
53 rc,
54 sync::{Arc, LazyLock},
55 time::{Duration, Instant},
56 vec,
57};
58use sum_tree::TreeMap;
59use text::operation_queue::OperationQueue;
60use text::*;
61pub use text::{
62 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
63 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
64 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
65 ToPointUtf16, Transaction, TransactionId, Unclipped,
66};
67use theme::{ActiveTheme as _, SyntaxTheme};
68#[cfg(any(test, feature = "test-support"))]
69use util::RandomCharIter;
70use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
71
72#[cfg(any(test, feature = "test-support"))]
73pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
74
75pub use lsp::DiagnosticSeverity;
76
77/// A label for the background task spawned by the buffer to compute
78/// a diff against the contents of its file.
79pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
80
/// Indicates whether a [`Buffer`] can be edited.
82#[derive(PartialEq, Clone, Copy, Debug)]
83pub enum Capability {
84 /// The buffer is a mutable replica.
85 ReadWrite,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90pub type BufferRow = u32;
91
92/// An in-memory representation of a source code file, including its text,
93/// syntax trees, git status, and diagnostics.
94pub struct Buffer {
95 text: TextBuffer,
96 branch_state: Option<BufferBranchState>,
97 /// Filesystem state, `None` when there is no path.
98 file: Option<Arc<dyn File>>,
99 /// The mtime of the file when this buffer was last loaded from
100 /// or saved to disk.
101 saved_mtime: Option<MTime>,
102 /// The version vector when this buffer was last loaded from
103 /// or saved to disk.
104 saved_version: clock::Global,
105 preview_version: clock::Global,
106 transaction_depth: usize,
107 was_dirty_before_starting_transaction: Option<bool>,
108 reload_task: Option<Task<Result<()>>>,
109 language: Option<Arc<Language>>,
110 autoindent_requests: Vec<Arc<AutoindentRequest>>,
111 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 reparse: Option<Task<()>>,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 change_bits: Vec<rc::Weak<Cell<bool>>>,
131 _subscriptions: Vec<gpui::Subscription>,
132 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
133}
134
135#[derive(Debug, Clone)]
136pub struct TreeSitterData {
137 chunks: RowChunks,
138 brackets_by_chunks: Vec<Option<Vec<BracketMatch>>>,
139}
140
141const MAX_ROWS_IN_A_CHUNK: u32 = 50;
142
143impl TreeSitterData {
144 fn clear(&mut self) {
145 self.brackets_by_chunks = vec![None; self.chunks.len()];
146 }
147
148 fn new(snapshot: text::BufferSnapshot) -> Self {
149 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 Self {
151 brackets_by_chunks: vec![None; chunks.len()],
152 chunks,
153 }
154 }
155}
156
157#[derive(Copy, Clone, Debug, PartialEq, Eq)]
158pub enum ParseStatus {
159 Idle,
160 Parsing,
161}
162
163struct BufferBranchState {
164 base_buffer: Entity<Buffer>,
165 merged_operations: Vec<Lamport>,
166}
167
168/// An immutable, cheaply cloneable representation of a fixed
169/// state of a buffer.
170pub struct BufferSnapshot {
171 pub text: text::BufferSnapshot,
172 pub syntax: SyntaxSnapshot,
173 file: Option<Arc<dyn File>>,
174 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
175 remote_selections: TreeMap<ReplicaId, SelectionSet>,
176 language: Option<Arc<Language>>,
177 non_text_state_update_count: usize,
178 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
179}
180
181/// The kind and amount of indentation in a particular line. For now,
182/// assumes that indentation is all the same character.
183#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
184pub struct IndentSize {
185 /// The number of bytes that comprise the indentation.
186 pub len: u32,
187 /// The kind of whitespace used for indentation.
188 pub kind: IndentKind,
189}
190
191/// A whitespace character that's used for indentation.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub enum IndentKind {
194 /// An ASCII space character.
195 #[default]
196 Space,
197 /// An ASCII tab character.
198 Tab,
199}
200
201/// The shape of a selection cursor.
202#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
203pub enum CursorShape {
204 /// A vertical bar
205 #[default]
206 Bar,
207 /// A block that surrounds the following character
208 Block,
209 /// An underline that runs along the following character
210 Underline,
211 /// A box drawn around the following character
212 Hollow,
213}
214
215impl From<settings::CursorShape> for CursorShape {
216 fn from(shape: settings::CursorShape) -> Self {
217 match shape {
218 settings::CursorShape::Bar => CursorShape::Bar,
219 settings::CursorShape::Block => CursorShape::Block,
220 settings::CursorShape::Underline => CursorShape::Underline,
221 settings::CursorShape::Hollow => CursorShape::Hollow,
222 }
223 }
224}
225
226#[derive(Clone, Debug)]
227struct SelectionSet {
228 line_mode: bool,
229 cursor_shape: CursorShape,
230 selections: Arc<[Selection<Anchor>]>,
231 lamport_timestamp: clock::Lamport,
232}
233
234/// A diagnostic associated with a certain range of a buffer.
235#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
236pub struct Diagnostic {
237 /// The name of the service that produced this diagnostic.
238 pub source: Option<String>,
239 /// A machine-readable code that identifies this diagnostic.
240 pub code: Option<NumberOrString>,
241 pub code_description: Option<lsp::Uri>,
242 /// Whether this diagnostic is a hint, warning, or error.
243 pub severity: DiagnosticSeverity,
244 /// The human-readable message associated with this diagnostic.
245 pub message: String,
    /// The human-readable message, in Markdown format, if available.
247 pub markdown: Option<String>,
248 /// An id that identifies the group to which this diagnostic belongs.
249 ///
250 /// When a language server produces a diagnostic with
251 /// one or more associated diagnostics, those diagnostics are all
252 /// assigned a single group ID.
253 pub group_id: usize,
254 /// Whether this diagnostic is the primary diagnostic for its group.
255 ///
256 /// In a given group, the primary diagnostic is the top-level diagnostic
257 /// returned by the language server. The non-primary diagnostics are the
258 /// associated diagnostics.
259 pub is_primary: bool,
260 /// Whether this diagnostic is considered to originate from an analysis of
261 /// files on disk, as opposed to any unsaved buffer contents. This is a
262 /// property of a given diagnostic source, and is configured for a given
263 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
264 /// for the language server.
265 pub is_disk_based: bool,
266 /// Whether this diagnostic marks unnecessary code.
267 pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly separated by their source.
269 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
271 pub data: Option<Value>,
272 /// Whether to underline the corresponding text range in the editor.
273 pub underline: bool,
274}
275
276#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
277pub enum DiagnosticSourceKind {
278 Pulled,
279 Pushed,
280 Other,
281}
282
283/// An operation used to synchronize this buffer with its other replicas.
284#[derive(Clone, Debug, PartialEq)]
285pub enum Operation {
286 /// A text operation.
287 Buffer(text::Operation),
288
289 /// An update to the buffer's diagnostics.
290 UpdateDiagnostics {
291 /// The id of the language server that produced the new diagnostics.
292 server_id: LanguageServerId,
293 /// The diagnostics.
294 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
295 /// The buffer's lamport timestamp.
296 lamport_timestamp: clock::Lamport,
297 },
298
299 /// An update to the most recent selections in this buffer.
300 UpdateSelections {
301 /// The selections.
302 selections: Arc<[Selection<Anchor>]>,
303 /// The buffer's lamport timestamp.
304 lamport_timestamp: clock::Lamport,
305 /// Whether the selections are in 'line mode'.
306 line_mode: bool,
307 /// The [`CursorShape`] associated with these selections.
308 cursor_shape: CursorShape,
309 },
310
311 /// An update to the characters that should trigger autocompletion
312 /// for this buffer.
313 UpdateCompletionTriggers {
314 /// The characters that trigger autocompletion.
315 triggers: Vec<String>,
316 /// The buffer's lamport timestamp.
317 lamport_timestamp: clock::Lamport,
318 /// The language server ID.
319 server_id: LanguageServerId,
320 },
321
322 /// An update to the line ending type of this buffer.
323 UpdateLineEnding {
324 /// The line ending type.
325 line_ending: LineEnding,
326 /// The buffer's lamport timestamp.
327 lamport_timestamp: clock::Lamport,
328 },
329}
330
331/// An event that occurs in a buffer.
332#[derive(Clone, Debug, PartialEq)]
333pub enum BufferEvent {
334 /// The buffer was changed in a way that must be
335 /// propagated to its other replicas.
336 Operation {
337 operation: Operation,
338 is_local: bool,
339 },
340 /// The buffer was edited.
341 Edited,
342 /// The buffer's `dirty` bit changed.
343 DirtyChanged,
344 /// The buffer was saved.
345 Saved,
346 /// The buffer's file was changed on disk.
347 FileHandleChanged,
348 /// The buffer was reloaded.
349 Reloaded,
    /// The buffer needs to be reloaded.
351 ReloadNeeded,
352 /// The buffer's language was changed.
353 LanguageChanged,
354 /// The buffer's syntax trees were updated.
355 Reparsed,
356 /// The buffer's diagnostics were updated.
357 DiagnosticsUpdated,
358 /// The buffer gained or lost editing capabilities.
359 CapabilityChanged,
360}
361
362/// The file associated with a buffer.
363pub trait File: Send + Sync + Any {
364 /// Returns the [`LocalFile`] associated with this file, if the
365 /// file is local.
366 fn as_local(&self) -> Option<&dyn LocalFile>;
367
368 /// Returns whether this file is local.
369 fn is_local(&self) -> bool {
370 self.as_local().is_some()
371 }
372
373 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
374 /// only available in some states, such as modification time.
375 fn disk_state(&self) -> DiskState;
376
377 /// Returns the path of this file relative to the worktree's root directory.
378 fn path(&self) -> &Arc<RelPath>;
379
380 /// Returns the path of this file relative to the worktree's parent directory (this means it
381 /// includes the name of the worktree's root folder).
382 fn full_path(&self, cx: &App) -> PathBuf;
383
384 /// Returns the path style of this file.
385 fn path_style(&self, cx: &App) -> PathStyle;
386
387 /// Returns the last component of this handle's absolute path. If this handle refers to the root
388 /// of its worktree, then this method will return the name of the worktree itself.
389 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
390
391 /// Returns the id of the worktree to which this file belongs.
392 ///
393 /// This is needed for looking up project-specific settings.
394 fn worktree_id(&self, cx: &App) -> WorktreeId;
395
396 /// Converts this file into a protobuf message.
397 fn to_proto(&self, cx: &App) -> rpc::proto::File;
398
    /// Returns whether Zed considers this to be a private file.
400 fn is_private(&self) -> bool;
401}
402
/// The file's storage status: whether it's stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`. The UI
/// distinguishes these two states; for example, the buffer tab does not display a deletion
/// indicator for new files.
407#[derive(Copy, Clone, Debug, PartialEq)]
408pub enum DiskState {
409 /// File created in Zed that has not been saved.
410 New,
411 /// File present on the filesystem.
412 Present { mtime: MTime },
413 /// Deleted file that was previously present.
414 Deleted,
415}
416
417impl DiskState {
418 /// Returns the file's last known modification time on disk.
419 pub fn mtime(self) -> Option<MTime> {
420 match self {
421 DiskState::New => None,
422 DiskState::Present { mtime } => Some(mtime),
423 DiskState::Deleted => None,
424 }
425 }
426
427 pub fn exists(&self) -> bool {
428 match self {
429 DiskState::New => false,
430 DiskState::Present { .. } => true,
431 DiskState::Deleted => false,
432 }
433 }
434}
435
436/// The file associated with a buffer, in the case where the file is on the local disk.
437pub trait LocalFile: File {
    /// Returns the absolute path of this file.
439 fn abs_path(&self, cx: &App) -> PathBuf;
440
441 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
442 fn load(&self, cx: &App) -> Task<Result<String>>;
443
444 /// Loads the file's contents from disk.
445 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
446}
447
448/// The auto-indent behavior associated with an editing operation.
449/// For some editing operations, each affected line of text has its
450/// indentation recomputed. For other operations, the entire block
451/// of edited text is adjusted uniformly.
452#[derive(Clone, Debug)]
453pub enum AutoindentMode {
454 /// Indent each line of inserted text.
455 EachLine,
456 /// Apply the same indentation adjustment to all of the lines
457 /// in a given insertion.
458 Block {
459 /// The original indentation column of the first line of each
460 /// insertion, if it has been copied.
461 ///
462 /// Knowing this makes it possible to preserve the relative indentation
463 /// of every line in the insertion from when it was copied.
464 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a` columns.
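        ///
        /// For example, if a block was copied from indent column 4 (`a = 4`)
        /// and its first line is auto-indented to column 8 (`b = 8`), every
        /// line of the insertion is shifted right by 4 columns, preserving the
        /// block's internal relative indentation.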
468 original_indent_columns: Vec<Option<u32>>,
469 },
470}
471
472#[derive(Clone)]
473struct AutoindentRequest {
474 before_edit: BufferSnapshot,
475 entries: Vec<AutoindentRequestEntry>,
476 is_block_mode: bool,
477 ignore_empty_lines: bool,
478}
479
480#[derive(Debug, Clone)]
481struct AutoindentRequestEntry {
482 /// A range of the buffer whose indentation should be adjusted.
483 range: Range<Anchor>,
484 /// Whether or not these lines should be considered brand new, for the
485 /// purpose of auto-indent. When text is not new, its indentation will
486 /// only be adjusted if the suggested indentation level has *changed*
487 /// since the edit was made.
488 first_line_is_new: bool,
489 indent_size: IndentSize,
490 original_indent_column: Option<u32>,
491}
492
493#[derive(Debug)]
494struct IndentSuggestion {
495 basis_row: u32,
496 delta: Ordering,
497 within_error: bool,
498}
499
500struct BufferChunkHighlights<'a> {
501 captures: SyntaxMapCaptures<'a>,
502 next_capture: Option<SyntaxMapCapture<'a>>,
503 stack: Vec<(usize, HighlightId)>,
504 highlight_maps: Vec<HighlightMap>,
505}
506
507/// An iterator that yields chunks of a buffer's text, along with their
508/// syntax highlights and diagnostic status.
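///
/// A minimal sketch of consuming this iterator via a snapshot (assuming a
/// `snapshot: BufferSnapshot` is in scope; the range and flags shown are
/// illustrative):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.text` carries the run of text; syntax-highlight and
///     // diagnostic metadata ride alongside it.
///     print!("{}", chunk.text);
/// }
/// ```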
509pub struct BufferChunks<'a> {
510 buffer_snapshot: Option<&'a BufferSnapshot>,
511 range: Range<usize>,
512 chunks: text::Chunks<'a>,
513 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
514 error_depth: usize,
515 warning_depth: usize,
516 information_depth: usize,
517 hint_depth: usize,
518 unnecessary_depth: usize,
519 underline: bool,
520 highlights: Option<BufferChunkHighlights<'a>>,
521}
522
523/// A chunk of a buffer's text, along with its syntax highlight and
524/// diagnostic status.
525#[derive(Clone, Debug, Default)]
526pub struct Chunk<'a> {
527 /// The text of the chunk.
528 pub text: &'a str,
529 /// The syntax highlighting style of the chunk.
530 pub syntax_highlight_id: Option<HighlightId>,
531 /// The highlight style that has been applied to this chunk in
532 /// the editor.
533 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of character indices in this chunk.
    pub chars: u128,
540 /// Whether this chunk of text is marked as unnecessary.
541 pub is_unnecessary: bool,
542 /// Whether this chunk of text was originally a tab character.
543 pub is_tab: bool,
544 /// Whether this chunk of text was originally an inlay.
545 pub is_inlay: bool,
546 /// Whether to underline the corresponding text range in the editor.
547 pub underline: bool,
548}
549
550/// A set of edits to a given version of a buffer, computed asynchronously.
551#[derive(Debug)]
552pub struct Diff {
553 pub base_version: clock::Global,
554 pub line_ending: LineEnding,
555 pub edits: Vec<(Range<usize>, Arc<str>)>,
556}
557
558#[derive(Debug, Clone, Copy)]
559pub(crate) struct DiagnosticEndpoint {
560 offset: usize,
561 is_start: bool,
562 underline: bool,
563 severity: DiagnosticSeverity,
564 is_unnecessary: bool,
565}
566
567/// A class of characters, used for characterizing a run of text.
568#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
569pub enum CharKind {
570 /// Whitespace.
571 Whitespace,
572 /// Punctuation.
573 Punctuation,
574 /// Word.
575 Word,
576}
577
578/// Context for character classification within a specific scope.
579#[derive(Copy, Clone, Eq, PartialEq, Debug)]
580pub enum CharScopeContext {
581 /// Character classification for completion queries.
582 ///
583 /// This context treats certain characters as word constituents that would
584 /// normally be considered punctuation, such as '-' in Tailwind classes
585 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
586 Completion,
587 /// Character classification for linked edits.
588 ///
589 /// This context handles characters that should be treated as part of
590 /// identifiers during linked editing operations, such as '.' in JSX
591 /// component names like `<Animated.View>`.
592 LinkedEdit,
593}
594
/// A runnable is a set of data about a region that could be resolved into a task.
596pub struct Runnable {
597 pub tags: SmallVec<[RunnableTag; 1]>,
598 pub language: Arc<Language>,
599 pub buffer: BufferId,
600}
601
602#[derive(Default, Clone, Debug)]
603pub struct HighlightedText {
604 pub text: SharedString,
605 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
606}
607
608#[derive(Default, Debug)]
609struct HighlightedTextBuilder {
610 pub text: String,
611 highlights: Vec<(Range<usize>, HighlightStyle)>,
612}
613
614impl HighlightedText {
615 pub fn from_buffer_range<T: ToOffset>(
616 range: Range<T>,
617 snapshot: &text::BufferSnapshot,
618 syntax_snapshot: &SyntaxSnapshot,
619 override_style: Option<HighlightStyle>,
620 syntax_theme: &SyntaxTheme,
621 ) -> Self {
622 let mut highlighted_text = HighlightedTextBuilder::default();
623 highlighted_text.add_text_from_buffer_range(
624 range,
625 snapshot,
626 syntax_snapshot,
627 override_style,
628 syntax_theme,
629 );
630 highlighted_text.build()
631 }
632
633 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
634 gpui::StyledText::new(self.text.clone())
635 .with_default_highlights(default_style, self.highlights.iter().cloned())
636 }
637
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight begins within that whitespace, along with a boolean
    /// indicating whether more lines follow.
640 pub fn first_line_preview(self) -> (Self, bool) {
641 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
642 let first_line = &self.text[..newline_ix];
643
        // Trim leading whitespace, unless a highlight starts within it.
645 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
646 if let Some((first_highlight_range, _)) = self.highlights.first() {
647 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
648 }
649
650 let preview_text = &first_line[preview_start_ix..];
651 let preview_highlights = self
652 .highlights
653 .into_iter()
654 .skip_while(|(range, _)| range.end <= preview_start_ix)
655 .take_while(|(range, _)| range.start < newline_ix)
656 .filter_map(|(mut range, highlight)| {
657 range.start = range.start.saturating_sub(preview_start_ix);
658 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
659 if range.is_empty() {
660 None
661 } else {
662 Some((range, highlight))
663 }
664 });
665
666 let preview = Self {
667 text: SharedString::new(preview_text),
668 highlights: preview_highlights.collect(),
669 };
670
671 (preview, self.text.len() > newline_ix)
672 }
673}
674
675impl HighlightedTextBuilder {
676 pub fn build(self) -> HighlightedText {
677 HighlightedText {
678 text: self.text.into(),
679 highlights: self.highlights,
680 }
681 }
682
683 pub fn add_text_from_buffer_range<T: ToOffset>(
684 &mut self,
685 range: Range<T>,
686 snapshot: &text::BufferSnapshot,
687 syntax_snapshot: &SyntaxSnapshot,
688 override_style: Option<HighlightStyle>,
689 syntax_theme: &SyntaxTheme,
690 ) {
691 let range = range.to_offset(snapshot);
692 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
693 let start = self.text.len();
694 self.text.push_str(chunk.text);
695 let end = self.text.len();
696
697 if let Some(highlight_style) = chunk
698 .syntax_highlight_id
699 .and_then(|id| id.style(syntax_theme))
700 {
701 let highlight_style = override_style.map_or(highlight_style, |override_style| {
702 highlight_style.highlight(override_style)
703 });
704 self.highlights.push((start..end, highlight_style));
705 } else if let Some(override_style) = override_style {
706 self.highlights.push((start..end, override_style));
707 }
708 }
709 }
710
711 fn highlighted_chunks<'a>(
712 range: Range<usize>,
713 snapshot: &'a text::BufferSnapshot,
714 syntax_snapshot: &'a SyntaxSnapshot,
715 ) -> BufferChunks<'a> {
716 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
717 grammar
718 .highlights_config
719 .as_ref()
720 .map(|config| &config.query)
721 });
722
723 let highlight_maps = captures
724 .grammars()
725 .iter()
726 .map(|grammar| grammar.highlight_map())
727 .collect();
728
729 BufferChunks::new(
730 snapshot.as_rope(),
731 range,
732 Some((captures, highlight_maps)),
733 false,
734 None,
735 )
736 }
737}
738
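/// Snapshots of a buffer's text before and after a set of proposed edits,
/// along with a syntax snapshot of the edited text, used to render a
/// highlighted preview of those edits.
///
/// A minimal sketch of the intended flow, assuming `preview` is the resolved
/// output of [`Buffer::preview_edits`] and that the buffer's current
/// `snapshot`, the anchored `edits`, and a `text_style` are in scope (the
/// variable names are illustrative):
///
/// ```ignore
/// let highlighted = preview.highlight_edits(&snapshot, &edits, true, cx);
/// let styled = highlighted.to_styled_text(&text_style);
/// ```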
739#[derive(Clone)]
740pub struct EditPreview {
741 old_snapshot: text::BufferSnapshot,
742 applied_edits_snapshot: text::BufferSnapshot,
743 syntax_snapshot: SyntaxSnapshot,
744}
745
746impl EditPreview {
747 pub fn highlight_edits(
748 &self,
749 current_snapshot: &BufferSnapshot,
750 edits: &[(Range<Anchor>, String)],
751 include_deletions: bool,
752 cx: &App,
753 ) -> HighlightedText {
754 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
755 return HighlightedText::default();
756 };
757
758 let mut highlighted_text = HighlightedTextBuilder::default();
759
760 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
761
762 let insertion_highlight_style = HighlightStyle {
763 background_color: Some(cx.theme().status().created_background),
764 ..Default::default()
765 };
766 let deletion_highlight_style = HighlightStyle {
767 background_color: Some(cx.theme().status().deleted_background),
768 ..Default::default()
769 };
770 let syntax_theme = cx.theme().syntax();
771
772 for (range, edit_text) in edits {
773 let edit_new_end_in_preview_snapshot = range
774 .end
775 .bias_right(&self.old_snapshot)
776 .to_offset(&self.applied_edits_snapshot);
777 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
778
779 let unchanged_range_in_preview_snapshot =
780 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
781 if !unchanged_range_in_preview_snapshot.is_empty() {
782 highlighted_text.add_text_from_buffer_range(
783 unchanged_range_in_preview_snapshot,
784 &self.applied_edits_snapshot,
785 &self.syntax_snapshot,
786 None,
787 syntax_theme,
788 );
789 }
790
791 let range_in_current_snapshot = range.to_offset(current_snapshot);
792 if include_deletions && !range_in_current_snapshot.is_empty() {
793 highlighted_text.add_text_from_buffer_range(
794 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
797 Some(deletion_highlight_style),
798 syntax_theme,
799 );
800 }
801
802 if !edit_text.is_empty() {
803 highlighted_text.add_text_from_buffer_range(
804 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
805 &self.applied_edits_snapshot,
806 &self.syntax_snapshot,
807 Some(insertion_highlight_style),
808 syntax_theme,
809 );
810 }
811
812 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
813 }
814
815 highlighted_text.add_text_from_buffer_range(
816 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
817 &self.applied_edits_snapshot,
818 &self.syntax_snapshot,
819 None,
820 syntax_theme,
821 );
822
823 highlighted_text.build()
824 }
825
826 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
827 let (first, _) = edits.first()?;
828 let (last, _) = edits.last()?;
829
830 let start = first
831 .start
832 .bias_left(&self.old_snapshot)
833 .to_point(&self.applied_edits_snapshot);
834 let end = last
835 .end
836 .bias_right(&self.old_snapshot)
837 .to_point(&self.applied_edits_snapshot);
838
839 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
840 let range = Point::new(start.row, 0)
841 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
842
843 Some(range.to_offset(&self.applied_edits_snapshot))
844 }
845}
846
847#[derive(Clone, Debug, PartialEq, Eq)]
848pub struct BracketMatch {
849 pub open_range: Range<usize>,
850 pub close_range: Range<usize>,
851 pub newline_only: bool,
852 pub depth: usize,
853}
854
855impl BracketMatch {
856 pub fn bracket_ranges(self) -> (Range<usize>, Range<usize>) {
857 (self.open_range, self.close_range)
858 }
859}
860
861impl Buffer {
862 /// Create a new buffer with the given base text.
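    ///
    /// A minimal sketch of constructing one (assuming a gpui `App` context is
    /// available; the contents are illustrative):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```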
863 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
864 Self::build(
865 TextBuffer::new(
866 ReplicaId::LOCAL,
867 cx.entity_id().as_non_zero_u64().into(),
868 base_text.into(),
869 ),
870 None,
871 Capability::ReadWrite,
872 )
873 }
874
    /// Create a new buffer from base text that has already been normalized (proper line endings and other normalization applied).
876 pub fn local_normalized(
877 base_text_normalized: Rope,
878 line_ending: LineEnding,
879 cx: &Context<Self>,
880 ) -> Self {
881 Self::build(
882 TextBuffer::new_normalized(
883 ReplicaId::LOCAL,
884 cx.entity_id().as_non_zero_u64().into(),
885 line_ending,
886 base_text_normalized,
887 ),
888 None,
889 Capability::ReadWrite,
890 )
891 }
892
893 /// Create a new buffer that is a replica of a remote buffer.
894 pub fn remote(
895 remote_id: BufferId,
896 replica_id: ReplicaId,
897 capability: Capability,
898 base_text: impl Into<String>,
899 ) -> Self {
900 Self::build(
901 TextBuffer::new(replica_id, remote_id, base_text.into()),
902 None,
903 capability,
904 )
905 }
906
907 /// Create a new buffer that is a replica of a remote buffer, populating its
908 /// state from the given protobuf message.
909 pub fn from_proto(
910 replica_id: ReplicaId,
911 capability: Capability,
912 message: proto::BufferState,
913 file: Option<Arc<dyn File>>,
914 ) -> Result<Self> {
915 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
916 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
917 let mut this = Self::build(buffer, file, capability);
918 this.text.set_line_ending(proto::deserialize_line_ending(
919 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
920 ));
921 this.saved_version = proto::deserialize_version(&message.saved_version);
922 this.saved_mtime = message.saved_mtime.map(|time| time.into());
923 Ok(this)
924 }
925
926 /// Serialize the buffer's state to a protobuf message.
927 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
928 proto::BufferState {
929 id: self.remote_id().into(),
930 file: self.file.as_ref().map(|f| f.to_proto(cx)),
931 base_text: self.base_text().to_string(),
932 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
933 saved_version: proto::serialize_version(&self.saved_version),
934 saved_mtime: self.saved_mtime.map(|time| time.into()),
935 }
936 }
937
938 /// Serialize as protobufs all of the changes to the buffer since the given version.
939 pub fn serialize_ops(
940 &self,
941 since: Option<clock::Global>,
942 cx: &App,
943 ) -> Task<Vec<proto::Operation>> {
944 let mut operations = Vec::new();
945 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
946
947 operations.extend(self.remote_selections.iter().map(|(_, set)| {
948 proto::serialize_operation(&Operation::UpdateSelections {
949 selections: set.selections.clone(),
950 lamport_timestamp: set.lamport_timestamp,
951 line_mode: set.line_mode,
952 cursor_shape: set.cursor_shape,
953 })
954 }));
955
956 for (server_id, diagnostics) in &self.diagnostics {
957 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
958 lamport_timestamp: self.diagnostics_timestamp,
959 server_id: *server_id,
960 diagnostics: diagnostics.iter().cloned().collect(),
961 }));
962 }
963
964 for (server_id, completions) in &self.completion_triggers_per_language_server {
965 operations.push(proto::serialize_operation(
966 &Operation::UpdateCompletionTriggers {
967 triggers: completions.iter().cloned().collect(),
968 lamport_timestamp: self.completion_triggers_timestamp,
969 server_id: *server_id,
970 },
971 ));
972 }
973
974 let text_operations = self.text.operations().clone();
975 cx.background_spawn(async move {
976 let since = since.unwrap_or_default();
977 operations.extend(
978 text_operations
979 .iter()
980 .filter(|(_, op)| !since.observed(op.timestamp()))
981 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
982 );
983 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
984 operations
985 })
986 }
987
988 /// Assign a language to the buffer, returning the buffer.
989 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
990 self.set_language(Some(language), cx);
991 self
992 }
993
994 /// Returns the [`Capability`] of this buffer.
995 pub fn capability(&self) -> Capability {
996 self.capability
997 }
998
999 /// Whether this buffer can only be read.
1000 pub fn read_only(&self) -> bool {
1001 self.capability == Capability::ReadOnly
1002 }
1003
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1005 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1006 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1007 let snapshot = buffer.snapshot();
1008 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1009 let tree_sitter_data = TreeSitterData::new(snapshot);
1010 Self {
1011 saved_mtime,
1012 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1013 saved_version: buffer.version(),
1014 preview_version: buffer.version(),
1015 reload_task: None,
1016 transaction_depth: 0,
1017 was_dirty_before_starting_transaction: None,
1018 has_unsaved_edits: Cell::new((buffer.version(), false)),
1019 text: buffer,
1020 branch_state: None,
1021 file,
1022 capability,
1023 syntax_map,
1024 reparse: None,
1025 non_text_state_update_count: 0,
1026 sync_parse_timeout: Duration::from_millis(1),
1027 parse_status: watch::channel(ParseStatus::Idle),
1028 autoindent_requests: Default::default(),
1029 wait_for_autoindent_txs: Default::default(),
1030 pending_autoindent: Default::default(),
1031 language: None,
1032 remote_selections: Default::default(),
1033 diagnostics: Default::default(),
1034 diagnostics_timestamp: Lamport::MIN,
1035 completion_triggers: Default::default(),
1036 completion_triggers_per_language_server: Default::default(),
1037 completion_triggers_timestamp: Lamport::MIN,
1038 deferred_ops: OperationQueue::new(),
1039 has_conflict: false,
1040 change_bits: Default::default(),
1041 _subscriptions: Vec::new(),
1042 }
1043 }
1044
1045 pub fn build_snapshot(
1046 text: Rope,
1047 language: Option<Arc<Language>>,
1048 language_registry: Option<Arc<LanguageRegistry>>,
1049 cx: &mut App,
1050 ) -> impl Future<Output = BufferSnapshot> + use<> {
1051 let entity_id = cx.reserve_entity::<Self>().entity_id();
1052 let buffer_id = entity_id.as_non_zero_u64().into();
1053 async move {
1054 let text =
1055 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1056 .snapshot();
1057 let mut syntax = SyntaxMap::new(&text).snapshot();
1058 if let Some(language) = language.clone() {
1059 let language_registry = language_registry.clone();
1060 syntax.reparse(&text, language_registry, language);
1061 }
1062 let tree_sitter_data = TreeSitterData::new(text.clone());
1063 BufferSnapshot {
1064 text,
1065 syntax,
1066 file: None,
1067 diagnostics: Default::default(),
1068 remote_selections: Default::default(),
1069 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1070 language,
1071 non_text_state_update_count: 0,
1072 }
1073 }
1074 }
1075
1076 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1077 let entity_id = cx.reserve_entity::<Self>().entity_id();
1078 let buffer_id = entity_id.as_non_zero_u64().into();
1079 let text = TextBuffer::new_normalized(
1080 ReplicaId::LOCAL,
1081 buffer_id,
1082 Default::default(),
1083 Rope::new(),
1084 )
1085 .snapshot();
1086 let syntax = SyntaxMap::new(&text).snapshot();
1087 let tree_sitter_data = TreeSitterData::new(text.clone());
1088 BufferSnapshot {
1089 text,
1090 syntax,
1091 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1092 file: None,
1093 diagnostics: Default::default(),
1094 remote_selections: Default::default(),
1095 language: None,
1096 non_text_state_update_count: 0,
1097 }
1098 }
1099
1100 #[cfg(any(test, feature = "test-support"))]
1101 pub fn build_snapshot_sync(
1102 text: Rope,
1103 language: Option<Arc<Language>>,
1104 language_registry: Option<Arc<LanguageRegistry>>,
1105 cx: &mut App,
1106 ) -> BufferSnapshot {
1107 let entity_id = cx.reserve_entity::<Self>().entity_id();
1108 let buffer_id = entity_id.as_non_zero_u64().into();
1109 let text =
1110 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1111 .snapshot();
1112 let mut syntax = SyntaxMap::new(&text).snapshot();
1113 if let Some(language) = language.clone() {
1114 syntax.reparse(&text, language_registry, language);
1115 }
1116 let tree_sitter_data = TreeSitterData::new(text.clone());
1117 BufferSnapshot {
1118 text,
1119 syntax,
1120 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1121 file: None,
1122 diagnostics: Default::default(),
1123 remote_selections: Default::default(),
1124 language,
1125 non_text_state_update_count: 0,
1126 }
1127 }
1128
1129 /// Retrieve a snapshot of the buffer's current state. This is computationally
1130 /// cheap, and allows reading from the buffer on a background thread.
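    ///
    /// A minimal sketch of reading from a snapshot on a background thread
    /// (assuming a `cx` that can spawn background tasks; the analysis shown is
    /// illustrative):
    ///
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     let len_in_bytes = snapshot.len();
    ///     // ...perform read-only analysis of `snapshot` here...
    /// })
    /// .detach();
    /// ```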
1131 pub fn snapshot(&self) -> BufferSnapshot {
1132 let text = self.text.snapshot();
1133 let mut syntax_map = self.syntax_map.lock();
1134 syntax_map.interpolate(&text);
1135 let syntax = syntax_map.snapshot();
1136
1137 BufferSnapshot {
1138 text,
1139 syntax,
1140 tree_sitter_data: self.tree_sitter_data.clone(),
1141 file: self.file.clone(),
1142 remote_selections: self.remote_selections.clone(),
1143 diagnostics: self.diagnostics.clone(),
1144 language: self.language.clone(),
1145 non_text_state_update_count: self.non_text_state_update_count,
1146 }
1147 }
1148
1149 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1150 let this = cx.entity();
1151 cx.new(|cx| {
1152 let mut branch = Self {
1153 branch_state: Some(BufferBranchState {
1154 base_buffer: this.clone(),
1155 merged_operations: Default::default(),
1156 }),
1157 language: self.language.clone(),
1158 has_conflict: self.has_conflict,
1159 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1160 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1161 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1162 };
1163 if let Some(language_registry) = self.language_registry() {
1164 branch.set_language_registry(language_registry);
1165 }
1166
1167 // Reparse the branch buffer so that we get syntax highlighting immediately.
1168 branch.reparse(cx);
1169
1170 branch
1171 })
1172 }
1173
1174 pub fn preview_edits(
1175 &self,
1176 edits: Arc<[(Range<Anchor>, String)]>,
1177 cx: &App,
1178 ) -> Task<EditPreview> {
1179 let registry = self.language_registry();
1180 let language = self.language().cloned();
1181 let old_snapshot = self.text.snapshot();
1182 let mut branch_buffer = self.text.branch();
1183 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1184 cx.background_spawn(async move {
1185 if !edits.is_empty() {
1186 if let Some(language) = language.clone() {
1187 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1188 }
1189
1190 branch_buffer.edit(edits.iter().cloned());
1191 let snapshot = branch_buffer.snapshot();
1192 syntax_snapshot.interpolate(&snapshot);
1193
1194 if let Some(language) = language {
1195 syntax_snapshot.reparse(&snapshot, registry, language);
1196 }
1197 }
1198 EditPreview {
1199 old_snapshot,
1200 applied_edits_snapshot: branch_buffer.snapshot(),
1201 syntax_snapshot,
1202 }
1203 })
1204 }
1205
1206 /// Applies all of the changes in this buffer that intersect any of the
1207 /// given `ranges` to its base buffer.
1208 ///
1209 /// If `ranges` is empty, then all changes will be applied. This buffer must
1210 /// be a branch buffer to call this method.
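    ///
    /// A minimal sketch, assuming `buffer: Entity<Buffer>` and a `&mut App`
    /// (the edited text is illustrative):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prelude\n")], None, cx);
    ///     // An empty `ranges` list merges every branch change into `buffer`.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```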
1211 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1212 let Some(base_buffer) = self.base_buffer() else {
1213 debug_panic!("not a branch buffer");
1214 return;
1215 };
1216
1217 let mut ranges = if ranges.is_empty() {
1218 &[0..usize::MAX]
1219 } else {
1220 ranges.as_slice()
1221 }
1222 .iter()
1223 .peekable();
1224
1225 let mut edits = Vec::new();
1226 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1227 let mut is_included = false;
1228 while let Some(range) = ranges.peek() {
1229 if range.end < edit.new.start {
1230 ranges.next().unwrap();
1231 } else {
1232 if range.start <= edit.new.end {
1233 is_included = true;
1234 }
1235 break;
1236 }
1237 }
1238
1239 if is_included {
1240 edits.push((
1241 edit.old.clone(),
1242 self.text_for_range(edit.new.clone()).collect::<String>(),
1243 ));
1244 }
1245 }
1246
1247 let operation = base_buffer.update(cx, |base_buffer, cx| {
1248 // cx.emit(BufferEvent::DiffBaseChanged);
1249 base_buffer.edit(edits, None, cx)
1250 });
1251
1252 if let Some(operation) = operation
1253 && let Some(BufferBranchState {
1254 merged_operations, ..
1255 }) = &mut self.branch_state
1256 {
1257 merged_operations.push(operation);
1258 }
1259 }
1260
1261 fn on_base_buffer_event(
1262 &mut self,
1263 _: Entity<Buffer>,
1264 event: &BufferEvent,
1265 cx: &mut Context<Self>,
1266 ) {
1267 let BufferEvent::Operation { operation, .. } = event else {
1268 return;
1269 };
1270 let Some(BufferBranchState {
1271 merged_operations, ..
1272 }) = &mut self.branch_state
1273 else {
1274 return;
1275 };
1276
1277 let mut operation_to_undo = None;
1278 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1279 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1280 {
1281 merged_operations.remove(ix);
1282 operation_to_undo = Some(operation.timestamp);
1283 }
1284
1285 self.apply_ops([operation.clone()], cx);
1286
1287 if let Some(timestamp) = operation_to_undo {
1288 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1289 self.undo_operations(counts, cx);
1290 }
1291 }
1292
1293 #[cfg(test)]
1294 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1295 &self.text
1296 }
1297
1298 /// Retrieve a snapshot of the buffer's raw text, without any
1299 /// language-related state like the syntax tree or diagnostics.
1300 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1301 self.text.snapshot()
1302 }
1303
1304 /// The file associated with the buffer, if any.
1305 pub fn file(&self) -> Option<&Arc<dyn File>> {
1306 self.file.as_ref()
1307 }
1308
1309 /// The version of the buffer that was last saved or reloaded from disk.
1310 pub fn saved_version(&self) -> &clock::Global {
1311 &self.saved_version
1312 }
1313
1314 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1315 pub fn saved_mtime(&self) -> Option<MTime> {
1316 self.saved_mtime
1317 }
1318
1319 /// Assign a language to the buffer.
1320 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1321 self.non_text_state_update_count += 1;
1322 self.syntax_map.lock().clear(&self.text);
1323 self.language = language;
1324 self.was_changed();
1325 self.reparse(cx);
1326 cx.emit(BufferEvent::LanguageChanged);
1327 }
1328
1329 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1330 /// other languages if parts of the buffer are written in different languages.
1331 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1332 self.syntax_map
1333 .lock()
1334 .set_language_registry(language_registry);
1335 }
1336
1337 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1338 self.syntax_map.lock().language_registry()
1339 }
1340
1341 /// Assign the line ending type to the buffer.
1342 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1343 self.text.set_line_ending(line_ending);
1344
1345 let lamport_timestamp = self.text.lamport_clock.tick();
1346 self.send_operation(
1347 Operation::UpdateLineEnding {
1348 line_ending,
1349 lamport_timestamp,
1350 },
1351 true,
1352 cx,
1353 );
1354 }
1355
1356 /// Assign the buffer a new [`Capability`].
1357 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1358 if self.capability != capability {
1359 self.capability = capability;
1360 cx.emit(BufferEvent::CapabilityChanged)
1361 }
1362 }
1363
1364 /// This method is called to signal that the buffer has been saved.
1365 pub fn did_save(
1366 &mut self,
1367 version: clock::Global,
1368 mtime: Option<MTime>,
1369 cx: &mut Context<Self>,
1370 ) {
1371 self.saved_version = version.clone();
1372 self.has_unsaved_edits.set((version, false));
1373 self.has_conflict = false;
1374 self.saved_mtime = mtime;
1375 self.was_changed();
1376 cx.emit(BufferEvent::Saved);
1377 cx.notify();
1378 }
1379
1380 /// Reloads the contents of the buffer from disk.
1381 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1382 let (tx, rx) = futures::channel::oneshot::channel();
1383 let prev_version = self.text.version();
1384 self.reload_task = Some(cx.spawn(async move |this, cx| {
1385 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1386 let file = this.file.as_ref()?.as_local()?;
1387
1388 Some((file.disk_state().mtime(), file.load(cx)))
1389 })?
1390 else {
1391 return Ok(());
1392 };
1393
1394 let new_text = new_text.await?;
1395 let diff = this
1396 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1397 .await;
1398 this.update(cx, |this, cx| {
1399 if this.version() == diff.base_version {
1400 this.finalize_last_transaction();
1401 this.apply_diff(diff, cx);
1402 tx.send(this.finalize_last_transaction().cloned()).ok();
1403 this.has_conflict = false;
1404 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1405 } else {
1406 if !diff.edits.is_empty()
1407 || this
1408 .edits_since::<usize>(&diff.base_version)
1409 .next()
1410 .is_some()
1411 {
1412 this.has_conflict = true;
1413 }
1414
1415 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1416 }
1417
1418 this.reload_task.take();
1419 })
1420 }));
1421 rx
1422 }
1423
1424 /// This method is called to signal that the buffer has been reloaded.
1425 pub fn did_reload(
1426 &mut self,
1427 version: clock::Global,
1428 line_ending: LineEnding,
1429 mtime: Option<MTime>,
1430 cx: &mut Context<Self>,
1431 ) {
1432 self.saved_version = version;
1433 self.has_unsaved_edits
1434 .set((self.saved_version.clone(), false));
1435 self.text.set_line_ending(line_ending);
1436 self.saved_mtime = mtime;
1437 cx.emit(BufferEvent::Reloaded);
1438 cx.notify();
1439 }
1440
1441 /// Updates the [`File`] backing this buffer. This should be called when
1442 /// the file has changed or has been deleted.
1443 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1444 let was_dirty = self.is_dirty();
1445 let mut file_changed = false;
1446
1447 if let Some(old_file) = self.file.as_ref() {
1448 if new_file.path() != old_file.path() {
1449 file_changed = true;
1450 }
1451
1452 let old_state = old_file.disk_state();
1453 let new_state = new_file.disk_state();
1454 if old_state != new_state {
1455 file_changed = true;
1456 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1457 cx.emit(BufferEvent::ReloadNeeded)
1458 }
1459 }
1460 } else {
1461 file_changed = true;
1462 };
1463
1464 self.file = Some(new_file);
1465 if file_changed {
1466 self.was_changed();
1467 self.non_text_state_update_count += 1;
1468 if was_dirty != self.is_dirty() {
1469 cx.emit(BufferEvent::DirtyChanged);
1470 }
1471 cx.emit(BufferEvent::FileHandleChanged);
1472 cx.notify();
1473 }
1474 }
1475
1476 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1477 Some(self.branch_state.as_ref()?.base_buffer.clone())
1478 }
1479
1480 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1481 pub fn language(&self) -> Option<&Arc<Language>> {
1482 self.language.as_ref()
1483 }
1484
1485 /// Returns the [`Language`] at the given location.
1486 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1487 let offset = position.to_offset(self);
1488 let mut is_first = true;
1489 let start_anchor = self.anchor_before(offset);
1490 let end_anchor = self.anchor_after(offset);
1491 self.syntax_map
1492 .lock()
1493 .layers_for_range(offset..offset, &self.text, false)
1494 .filter(|layer| {
1495 if is_first {
1496 is_first = false;
1497 return true;
1498 }
1499
1500 layer
1501 .included_sub_ranges
1502 .map(|sub_ranges| {
1503 sub_ranges.iter().any(|sub_range| {
1504 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1505 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1506 !is_before_start && !is_after_end
1507 })
1508 })
1509 .unwrap_or(true)
1510 })
1511 .last()
1512 .map(|info| info.language.clone())
1513 .or_else(|| self.language.clone())
1514 }
1515
1516 /// Returns each [`Language`] for the active syntax layers at the given location.
1517 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1518 let offset = position.to_offset(self);
1519 let mut languages: Vec<Arc<Language>> = self
1520 .syntax_map
1521 .lock()
1522 .layers_for_range(offset..offset, &self.text, false)
1523 .map(|info| info.language.clone())
1524 .collect();
1525
1526 if languages.is_empty()
1527 && let Some(buffer_language) = self.language()
1528 {
1529 languages.push(buffer_language.clone());
1530 }
1531
1532 languages
1533 }
1534
1535 /// An integer version number that accounts for all updates besides
1536 /// the buffer's text itself (which is versioned via a version vector).
1537 pub fn non_text_state_update_count(&self) -> usize {
1538 self.non_text_state_update_count
1539 }
1540
1541 /// Whether the buffer is being parsed in the background.
1542 #[cfg(any(test, feature = "test-support"))]
1543 pub fn is_parsing(&self) -> bool {
1544 self.reparse.is_some()
1545 }
1546
1547 /// Indicates whether the buffer contains any regions that may be
1548 /// written in a language that hasn't been loaded yet.
1549 pub fn contains_unknown_injections(&self) -> bool {
1550 self.syntax_map.lock().contains_unknown_injections()
1551 }
1552
1553 #[cfg(any(test, feature = "test-support"))]
1554 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1555 self.sync_parse_timeout = timeout;
1556 }
1557
1558 /// Called after an edit to synchronize the buffer's main parse tree with
1559 /// the buffer's new underlying state.
1560 ///
1561 /// Locks the syntax map and interpolates the edits since the last reparse
1562 /// into the foreground syntax tree.
1563 ///
1564 /// Then takes a stable snapshot of the syntax map before unlocking it.
1565 /// The snapshot with the interpolated edits is sent to a background thread,
1566 /// where we ask Tree-sitter to perform an incremental parse.
1567 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting for the parse to complete. If it finishes within that window,
    /// we proceed synchronously with the newly parsed tree.
    ///
    /// If we time out waiting for the parse, we return with the interpolated
    /// tree still in the foreground and spawn a second task that waits for the
    /// background parse to complete. When it does, we call back into the main
    /// thread and assign the newly parsed state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1581 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1582 if self.reparse.is_some() {
1583 return;
1584 }
1585 let language = if let Some(language) = self.language.clone() {
1586 language
1587 } else {
1588 return;
1589 };
1590
1591 let text = self.text_snapshot();
1592 let parsed_version = self.version();
1593
1594 let mut syntax_map = self.syntax_map.lock();
1595 syntax_map.interpolate(&text);
1596 let language_registry = syntax_map.language_registry();
1597 let mut syntax_snapshot = syntax_map.snapshot();
1598 drop(syntax_map);
1599
1600 let parse_task = cx.background_spawn({
1601 let language = language.clone();
1602 let language_registry = language_registry.clone();
1603 async move {
1604 syntax_snapshot.reparse(&text, language_registry, language);
1605 syntax_snapshot
1606 }
1607 });
1608
1609 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1610 match cx
1611 .background_executor()
1612 .block_with_timeout(self.sync_parse_timeout, parse_task)
1613 {
1614 Ok(new_syntax_snapshot) => {
1615 self.did_finish_parsing(new_syntax_snapshot, cx);
1616 self.reparse = None;
1617 }
1618 Err(parse_task) => {
1619 // todo(lw): hot foreground spawn
1620 self.reparse = Some(cx.spawn(async move |this, cx| {
1621 let new_syntax_map = cx.background_spawn(parse_task).await;
1622 this.update(cx, move |this, cx| {
1623 let grammar_changed = || {
1624 this.language.as_ref().is_none_or(|current_language| {
1625 !Arc::ptr_eq(&language, current_language)
1626 })
1627 };
1628 let language_registry_changed = || {
1629 new_syntax_map.contains_unknown_injections()
1630 && language_registry.is_some_and(|registry| {
1631 registry.version() != new_syntax_map.language_registry_version()
1632 })
1633 };
1634 let parse_again = this.version.changed_since(&parsed_version)
1635 || language_registry_changed()
1636 || grammar_changed();
1637 this.did_finish_parsing(new_syntax_map, cx);
1638 this.reparse = None;
1639 if parse_again {
1640 this.reparse(cx);
1641 }
1642 })
1643 .ok();
1644 }));
1645 }
1646 }
1647 }
1648
1649 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1650 self.was_changed();
1651 self.non_text_state_update_count += 1;
1652 self.syntax_map.lock().did_parse(syntax_snapshot);
1653 self.request_autoindent(cx);
1654 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1655 self.tree_sitter_data.lock().clear();
1656 cx.emit(BufferEvent::Reparsed);
1657 cx.notify();
1658 }
1659
1660 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1661 self.parse_status.1.clone()
1662 }
1663
1664 /// Assign to the buffer a set of diagnostics created by a given language server.
1665 pub fn update_diagnostics(
1666 &mut self,
1667 server_id: LanguageServerId,
1668 diagnostics: DiagnosticSet,
1669 cx: &mut Context<Self>,
1670 ) {
1671 let lamport_timestamp = self.text.lamport_clock.tick();
1672 let op = Operation::UpdateDiagnostics {
1673 server_id,
1674 diagnostics: diagnostics.iter().cloned().collect(),
1675 lamport_timestamp,
1676 };
1677
1678 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1679 self.send_operation(op, true, cx);
1680 }
1681
1682 pub fn buffer_diagnostics(
1683 &self,
1684 for_server: Option<LanguageServerId>,
1685 ) -> Vec<&DiagnosticEntry<Anchor>> {
1686 match for_server {
1687 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1688 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1689 Err(_) => Vec::new(),
1690 },
1691 None => self
1692 .diagnostics
1693 .iter()
1694 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1695 .collect(),
1696 }
1697 }
1698
1699 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1700 if let Some(indent_sizes) = self.compute_autoindents() {
1701 let indent_sizes = cx.background_spawn(indent_sizes);
1702 match cx
1703 .background_executor()
1704 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1705 {
1706 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1707 Err(indent_sizes) => {
1708 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1709 let indent_sizes = indent_sizes.await;
1710 this.update(cx, |this, cx| {
1711 this.apply_autoindents(indent_sizes, cx);
1712 })
1713 .ok();
1714 }));
1715 }
1716 }
1717 } else {
1718 self.autoindent_requests.clear();
1719 for tx in self.wait_for_autoindent_txs.drain(..) {
1720 tx.send(()).ok();
1721 }
1722 }
1723 }
1724
1725 fn compute_autoindents(
1726 &self,
1727 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1728 let max_rows_between_yields = 100;
1729 let snapshot = self.snapshot();
1730 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1731 return None;
1732 }
1733
1734 let autoindent_requests = self.autoindent_requests.clone();
1735 Some(async move {
1736 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1737 for request in autoindent_requests {
1738 // Resolve each edited range to its row in the current buffer and in the
1739 // buffer before this batch of edits.
1740 let mut row_ranges = Vec::new();
1741 let mut old_to_new_rows = BTreeMap::new();
1742 let mut language_indent_sizes_by_new_row = Vec::new();
1743 for entry in &request.entries {
1744 let position = entry.range.start;
1745 let new_row = position.to_point(&snapshot).row;
1746 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1747 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1748
1749 if !entry.first_line_is_new {
1750 let old_row = position.to_point(&request.before_edit).row;
1751 old_to_new_rows.insert(old_row, new_row);
1752 }
1753 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1754 }
1755
1756 // Build a map containing the suggested indentation for each of the edited lines
1757 // with respect to the state of the buffer before these edits. This map is keyed
1758 // by the rows for these lines in the current state of the buffer.
1759 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1760 let old_edited_ranges =
1761 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1762 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1763 let mut language_indent_size = IndentSize::default();
1764 for old_edited_range in old_edited_ranges {
1765 let suggestions = request
1766 .before_edit
1767 .suggest_autoindents(old_edited_range.clone())
1768 .into_iter()
1769 .flatten();
1770 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1771 if let Some(suggestion) = suggestion {
1772 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1773
1774 // Find the indent size based on the language for this row.
1775 while let Some((row, size)) = language_indent_sizes.peek() {
1776 if *row > new_row {
1777 break;
1778 }
1779 language_indent_size = *size;
1780 language_indent_sizes.next();
1781 }
1782
1783 let suggested_indent = old_to_new_rows
1784 .get(&suggestion.basis_row)
1785 .and_then(|from_row| {
1786 Some(old_suggestions.get(from_row).copied()?.0)
1787 })
1788 .unwrap_or_else(|| {
1789 request
1790 .before_edit
1791 .indent_size_for_line(suggestion.basis_row)
1792 })
1793 .with_delta(suggestion.delta, language_indent_size);
1794 old_suggestions
1795 .insert(new_row, (suggested_indent, suggestion.within_error));
1796 }
1797 }
1798 yield_now().await;
1799 }
1800
1801 // Compute new suggestions for each line, but only include them in the result
1802 // if they differ from the old suggestion for that line.
1803 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1804 let mut language_indent_size = IndentSize::default();
1805 for (row_range, original_indent_column) in row_ranges {
1806 let new_edited_row_range = if request.is_block_mode {
1807 row_range.start..row_range.start + 1
1808 } else {
1809 row_range.clone()
1810 };
1811
1812 let suggestions = snapshot
1813 .suggest_autoindents(new_edited_row_range.clone())
1814 .into_iter()
1815 .flatten();
1816 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1817 if let Some(suggestion) = suggestion {
1818 // Find the indent size based on the language for this row.
1819 while let Some((row, size)) = language_indent_sizes.peek() {
1820 if *row > new_row {
1821 break;
1822 }
1823 language_indent_size = *size;
1824 language_indent_sizes.next();
1825 }
1826
1827 let suggested_indent = indent_sizes
1828 .get(&suggestion.basis_row)
1829 .copied()
1830 .map(|e| e.0)
1831 .unwrap_or_else(|| {
1832 snapshot.indent_size_for_line(suggestion.basis_row)
1833 })
1834 .with_delta(suggestion.delta, language_indent_size);
1835
1836 if old_suggestions.get(&new_row).is_none_or(
1837 |(old_indentation, was_within_error)| {
1838 suggested_indent != *old_indentation
1839 && (!suggestion.within_error || *was_within_error)
1840 },
1841 ) {
1842 indent_sizes.insert(
1843 new_row,
1844 (suggested_indent, request.ignore_empty_lines),
1845 );
1846 }
1847 }
1848 }
1849
1850 if let (true, Some(original_indent_column)) =
1851 (request.is_block_mode, original_indent_column)
1852 {
1853 let new_indent =
1854 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1855 *indent
1856 } else {
1857 snapshot.indent_size_for_line(row_range.start)
1858 };
1859 let delta = new_indent.len as i64 - original_indent_column as i64;
1860 if delta != 0 {
1861 for row in row_range.skip(1) {
1862 indent_sizes.entry(row).or_insert_with(|| {
1863 let mut size = snapshot.indent_size_for_line(row);
1864 if size.kind == new_indent.kind {
1865 match delta.cmp(&0) {
1866 Ordering::Greater => size.len += delta as u32,
1867 Ordering::Less => {
1868 size.len = size.len.saturating_sub(-delta as u32)
1869 }
1870 Ordering::Equal => {}
1871 }
1872 }
1873 (size, request.ignore_empty_lines)
1874 });
1875 }
1876 }
1877 }
1878
1879 yield_now().await;
1880 }
1881 }
1882
1883 indent_sizes
1884 .into_iter()
1885 .filter_map(|(row, (indent, ignore_empty_lines))| {
1886 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1887 None
1888 } else {
1889 Some((row, indent))
1890 }
1891 })
1892 .collect()
1893 })
1894 }
1895
1896 fn apply_autoindents(
1897 &mut self,
1898 indent_sizes: BTreeMap<u32, IndentSize>,
1899 cx: &mut Context<Self>,
1900 ) {
1901 self.autoindent_requests.clear();
1902 for tx in self.wait_for_autoindent_txs.drain(..) {
1903 tx.send(()).ok();
1904 }
1905
1906 let edits: Vec<_> = indent_sizes
1907 .into_iter()
1908 .filter_map(|(row, indent_size)| {
1909 let current_size = indent_size_for_line(self, row);
1910 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1911 })
1912 .collect();
1913
1914 let preserve_preview = self.preserve_preview();
1915 self.edit(edits, None, cx);
1916 if preserve_preview {
1917 self.refresh_preview();
1918 }
1919 }
1920
1921 /// Create a minimal edit that will cause the given row to be indented
1922 /// with the given size. After applying this edit, the length of the line
1923 /// will always be at least `new_size.len`.
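    ///
    /// A rough sketch of the expected result (marked `ignore`; it assumes the
    /// crate's `IndentSize` and `Point` types are in scope):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at the start of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```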
1924 pub fn edit_for_indent_size_adjustment(
1925 row: u32,
1926 current_size: IndentSize,
1927 new_size: IndentSize,
1928 ) -> Option<(Range<Point>, String)> {
1929 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1931 Ordering::Greater => {
1932 let point = Point::new(row, 0);
1933 Some((
1934 point..point,
1935 iter::repeat(new_size.char())
1936 .take((new_size.len - current_size.len) as usize)
1937 .collect::<String>(),
1938 ))
1939 }
1940
1941 Ordering::Less => Some((
1942 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1943 String::new(),
1944 )),
1945
1946 Ordering::Equal => None,
1947 }
1948 } else {
1949 Some((
1950 Point::new(row, 0)..Point::new(row, current_size.len),
1951 iter::repeat(new_size.char())
1952 .take(new_size.len as usize)
1953 .collect::<String>(),
1954 ))
1955 }
1956 }
1957
1958 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1959 /// and the given new text.
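    ///
    /// A minimal sketch of the intended usage (marked `ignore`; it assumes this
    /// runs inside the buffer entity's `update`, so that `buffer: &mut Buffer`
    /// and `cx: &mut Context<Buffer>` are available):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread, then apply it once it's ready.
    /// let diff = buffer.diff(new_text, cx);
    /// cx.spawn(async move |this, cx| {
    ///     let diff = diff.await;
    ///     this.update(cx, |this, cx| {
    ///         this.apply_diff(diff, cx);
    ///     })
    ///     .ok();
    /// })
    /// .detach();
    /// ```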
1960 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1961 let old_text = self.as_rope().clone();
1962 let base_version = self.version();
1963 cx.background_executor()
1964 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1965 let old_text = old_text.to_string();
1966 let line_ending = LineEnding::detect(&new_text);
1967 LineEnding::normalize(&mut new_text);
1968 let edits = text_diff(&old_text, &new_text);
1969 Diff {
1970 base_version,
1971 line_ending,
1972 edits,
1973 }
1974 })
1975 }
1976
1977 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1979 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1980 let old_text = self.as_rope().clone();
1981 let line_ending = self.line_ending();
1982 let base_version = self.version();
1983 cx.background_spawn(async move {
1984 let ranges = trailing_whitespace_ranges(&old_text);
1985 let empty = Arc::<str>::from("");
1986 Diff {
1987 base_version,
1988 line_ending,
1989 edits: ranges
1990 .into_iter()
1991 .map(|range| (range, empty.clone()))
1992 .collect(),
1993 }
1994 })
1995 }
1996
1997 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
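    ///
    /// A rough sketch of the effect (assuming `buffer` currently ends in trailing
    /// whitespace and blank lines):
    ///
    /// ```ignore
    /// // "fn main() {}  \n\n" becomes "fn main() {}\n".
    /// buffer.ensure_final_newline(cx);
    /// ```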
1999 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2000 let len = self.len();
2001 if len == 0 {
2002 return;
2003 }
2004 let mut offset = len;
2005 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2006 let non_whitespace_len = chunk
2007 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2008 .len();
2009 offset -= chunk.len();
2010 offset += non_whitespace_len;
2011 if non_whitespace_len != 0 {
2012 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2013 return;
2014 }
2015 break;
2016 }
2017 }
2018 self.edit([(offset..len, "\n")], None, cx);
2019 }
2020
2021 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any
    /// parts of the diff that conflict with those changes are discarded.
2024 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2025 let snapshot = self.snapshot();
2026 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2027 let mut delta = 0;
2028 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2029 while let Some(edit_since) = edits_since.peek() {
2030 // If the edit occurs after a diff hunk, then it does not
2031 // affect that hunk.
2032 if edit_since.old.start > range.end {
2033 break;
2034 }
2035 // If the edit precedes the diff hunk, then adjust the hunk
2036 // to reflect the edit.
2037 else if edit_since.old.end < range.start {
2038 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2039 edits_since.next();
2040 }
2041 // If the edit intersects a diff hunk, then discard that hunk.
2042 else {
2043 return None;
2044 }
2045 }
2046
2047 let start = (range.start as i64 + delta) as usize;
2048 let end = (range.end as i64 + delta) as usize;
2049 Some((start..end, new_text))
2050 });
2051
2052 self.start_transaction();
2053 self.text.set_line_ending(diff.line_ending);
2054 self.edit(adjusted_edits, None, cx);
2055 self.end_transaction(cx)
2056 }
2057
2058 pub fn has_unsaved_edits(&self) -> bool {
2059 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2060
2061 if last_version == self.version {
2062 self.has_unsaved_edits
2063 .set((last_version, has_unsaved_edits));
2064 return has_unsaved_edits;
2065 }
2066
2067 let has_edits = self.has_edits_since(&self.saved_version);
2068 self.has_unsaved_edits
2069 .set((self.version.clone(), has_edits));
2070 has_edits
2071 }
2072
2073 /// Checks if the buffer has unsaved changes.
2074 pub fn is_dirty(&self) -> bool {
2075 if self.capability == Capability::ReadOnly {
2076 return false;
2077 }
2078 if self.has_conflict {
2079 return true;
2080 }
2081 match self.file.as_ref().map(|f| f.disk_state()) {
2082 Some(DiskState::New) | Some(DiskState::Deleted) => {
2083 !self.is_empty() && self.has_unsaved_edits()
2084 }
2085 _ => self.has_unsaved_edits(),
2086 }
2087 }
2088
2089 /// Checks if the buffer and its file have both changed since the buffer
2090 /// was last saved or reloaded.
2091 pub fn has_conflict(&self) -> bool {
2092 if self.has_conflict {
2093 return true;
2094 }
2095 let Some(file) = self.file.as_ref() else {
2096 return false;
2097 };
2098 match file.disk_state() {
2099 DiskState::New => false,
2100 DiskState::Present { mtime } => match self.saved_mtime {
2101 Some(saved_mtime) => {
2102 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2103 }
2104 None => true,
2105 },
2106 DiskState::Deleted => false,
2107 }
2108 }
2109
2110 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2111 pub fn subscribe(&mut self) -> Subscription {
2112 self.text.subscribe()
2113 }
2114
2115 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2116 ///
2117 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
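    ///
    /// A rough sketch (assuming `buffer: &mut Buffer` is available):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after edits are applied elsewhere...
    /// if changed.get() {
    ///     // The buffer's text has changed since the bit was registered.
    /// }
    /// ```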
2119 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2120 if let Err(ix) = self
2121 .change_bits
2122 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2123 {
2124 self.change_bits.insert(ix, bit);
2125 }
2126 }
2127
2128 /// Set the change bit for all "listeners".
2129 fn was_changed(&mut self) {
2130 self.change_bits.retain(|change_bit| {
2131 change_bit
2132 .upgrade()
2133 .inspect(|bit| {
2134 _ = bit.replace(true);
2135 })
2136 .is_some()
2137 });
2138 }
2139
2140 /// Starts a transaction, if one is not already in-progress. When undoing or
2141 /// redoing edits, all of the edits performed within a transaction are undone
2142 /// or redone together.
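    ///
    /// A minimal sketch of grouping edits (assuming `buffer: &mut Buffer` and
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.append("}\n", cx);
    /// buffer.end_transaction(cx);
    /// // A single `buffer.undo(cx)` now reverts both edits.
    /// ```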
2143 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2144 self.start_transaction_at(Instant::now())
2145 }
2146
2147 /// Starts a transaction, providing the current time. Subsequent transactions
2148 /// that occur within a short period of time will be grouped together. This
2149 /// is controlled by the buffer's undo grouping duration.
2150 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2151 self.transaction_depth += 1;
2152 if self.was_dirty_before_starting_transaction.is_none() {
2153 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2154 }
2155 self.text.start_transaction_at(now)
2156 }
2157
2158 /// Terminates the current transaction, if this is the outermost transaction.
2159 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2160 self.end_transaction_at(Instant::now(), cx)
2161 }
2162
2163 /// Terminates the current transaction, providing the current time. Subsequent transactions
2164 /// that occur within a short period of time will be grouped together. This
2165 /// is controlled by the buffer's undo grouping duration.
2166 pub fn end_transaction_at(
2167 &mut self,
2168 now: Instant,
2169 cx: &mut Context<Self>,
2170 ) -> Option<TransactionId> {
2171 assert!(self.transaction_depth > 0);
2172 self.transaction_depth -= 1;
2173 let was_dirty = if self.transaction_depth == 0 {
2174 self.was_dirty_before_starting_transaction.take().unwrap()
2175 } else {
2176 false
2177 };
2178 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2179 self.did_edit(&start_version, was_dirty, cx);
2180 Some(transaction_id)
2181 } else {
2182 None
2183 }
2184 }
2185
2186 /// Manually add a transaction to the buffer's undo history.
2187 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2188 self.text.push_transaction(transaction, now);
2189 }
2190
2191 /// Differs from `push_transaction` in that it does not clear the redo
2192 /// stack. Intended to be used to create a parent transaction to merge
2193 /// potential child transactions into.
2194 ///
2195 /// The caller is responsible for removing it from the undo history using
2196 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2197 /// are merged into this transaction, the caller is responsible for ensuring
2198 /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and to merge the resulting transactions into
    /// the transaction created by this method.
2202 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2203 self.text.push_empty_transaction(now)
2204 }
2205
2206 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2208 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2209 self.text.finalize_last_transaction()
2210 }
2211
2212 /// Manually group all changes since a given transaction.
2213 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2214 self.text.group_until_transaction(transaction_id);
2215 }
2216
    /// Manually remove a transaction from the buffer's undo history.
2218 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2219 self.text.forget_transaction(transaction_id)
2220 }
2221
    /// Retrieve a transaction from the buffer's undo history.
2223 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2224 self.text.get_transaction(transaction_id)
2225 }
2226
2227 /// Manually merge two transactions in the buffer's undo history.
2228 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2229 self.text.merge_transactions(transaction, destination);
2230 }
2231
2232 /// Waits for the buffer to receive operations with the given timestamps.
2233 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2234 &mut self,
2235 edit_ids: It,
2236 ) -> impl Future<Output = Result<()>> + use<It> {
2237 self.text.wait_for_edits(edit_ids)
2238 }
2239
2240 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2241 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2242 &mut self,
2243 anchors: It,
2244 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2245 self.text.wait_for_anchors(anchors)
2246 }
2247
2248 /// Waits for the buffer to receive operations up to the given version.
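    ///
    /// A rough sketch (assuming `buffer: &mut Buffer`, `cx: &mut Context<Buffer>`,
    /// and a known `version: clock::Global`):
    ///
    /// ```ignore
    /// let wait = buffer.wait_for_version(version);
    /// cx.background_spawn(async move {
    ///     if wait.await.is_ok() {
    ///         // All operations up to `version` have been applied.
    ///     }
    /// })
    /// .detach();
    /// ```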
2249 pub fn wait_for_version(
2250 &mut self,
2251 version: clock::Global,
2252 ) -> impl Future<Output = Result<()>> + use<> {
2253 self.text.wait_for_version(version)
2254 }
2255
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2258 pub fn give_up_waiting(&mut self) {
2259 self.text.give_up_waiting();
2260 }
2261
2262 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2263 let mut rx = None;
2264 if !self.autoindent_requests.is_empty() {
2265 let channel = oneshot::channel();
2266 self.wait_for_autoindent_txs.push(channel.0);
2267 rx = Some(channel.1);
2268 }
2269 rx
2270 }
2271
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2273 pub fn set_active_selections(
2274 &mut self,
2275 selections: Arc<[Selection<Anchor>]>,
2276 line_mode: bool,
2277 cursor_shape: CursorShape,
2278 cx: &mut Context<Self>,
2279 ) {
2280 let lamport_timestamp = self.text.lamport_clock.tick();
2281 self.remote_selections.insert(
2282 self.text.replica_id(),
2283 SelectionSet {
2284 selections: selections.clone(),
2285 lamport_timestamp,
2286 line_mode,
2287 cursor_shape,
2288 },
2289 );
2290 self.send_operation(
2291 Operation::UpdateSelections {
2292 selections,
2293 line_mode,
2294 lamport_timestamp,
2295 cursor_shape,
2296 },
2297 true,
2298 cx,
2299 );
2300 self.non_text_state_update_count += 1;
2301 cx.notify();
2302 }
2303
2304 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2305 /// this replica.
2306 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2307 if self
2308 .remote_selections
2309 .get(&self.text.replica_id())
2310 .is_none_or(|set| !set.selections.is_empty())
2311 {
2312 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2313 }
2314 }
2315
2316 pub fn set_agent_selections(
2317 &mut self,
2318 selections: Arc<[Selection<Anchor>]>,
2319 line_mode: bool,
2320 cursor_shape: CursorShape,
2321 cx: &mut Context<Self>,
2322 ) {
2323 let lamport_timestamp = self.text.lamport_clock.tick();
2324 self.remote_selections.insert(
2325 ReplicaId::AGENT,
2326 SelectionSet {
2327 selections,
2328 lamport_timestamp,
2329 line_mode,
2330 cursor_shape,
2331 },
2332 );
2333 self.non_text_state_update_count += 1;
2334 cx.notify();
2335 }
2336
2337 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2338 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2339 }
2340
2341 /// Replaces the buffer's entire text.
2342 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2343 where
2344 T: Into<Arc<str>>,
2345 {
2346 self.autoindent_requests.clear();
2347 self.edit([(0..self.len(), text)], None, cx)
2348 }
2349
2350 /// Appends the given text to the end of the buffer.
2351 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2352 where
2353 T: Into<Arc<str>>,
2354 {
2355 self.edit([(self.len()..self.len(), text)], None, cx)
2356 }
2357
2358 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2359 /// delete, and a string of text to insert at that location.
2360 ///
2361 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2362 /// request for the edited ranges, which will be processed when the buffer finishes
2363 /// parsing.
2364 ///
    /// Parsing takes place at the end of a transaction, and may complete synchronously
    /// or asynchronously, depending on the changes.
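    ///
    /// A minimal sketch (assuming `buffer: &mut Buffer`, `cx: &mut Context<Buffer>`,
    /// and a buffer with at least three lines):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         // Delete the first two characters of row 0.
    ///         (Point::new(0, 0)..Point::new(0, 2), ""),
    ///         // Insert a closing brace at the start of row 2.
    ///         (Point::new(2, 0)..Point::new(2, 0), "}\n"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```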
2367 pub fn edit<I, S, T>(
2368 &mut self,
2369 edits_iter: I,
2370 autoindent_mode: Option<AutoindentMode>,
2371 cx: &mut Context<Self>,
2372 ) -> Option<clock::Lamport>
2373 where
2374 I: IntoIterator<Item = (Range<S>, T)>,
2375 S: ToOffset,
2376 T: Into<Arc<str>>,
2377 {
2378 // Skip invalid edits and coalesce contiguous ones.
2379 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2380
2381 for (range, new_text) in edits_iter {
2382 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2383
2384 if range.start > range.end {
2385 mem::swap(&mut range.start, &mut range.end);
2386 }
2387 let new_text = new_text.into();
2388 if !new_text.is_empty() || !range.is_empty() {
2389 if let Some((prev_range, prev_text)) = edits.last_mut()
2390 && prev_range.end >= range.start
2391 {
2392 prev_range.end = cmp::max(prev_range.end, range.end);
2393 *prev_text = format!("{prev_text}{new_text}").into();
2394 } else {
2395 edits.push((range, new_text));
2396 }
2397 }
2398 }
2399 if edits.is_empty() {
2400 return None;
2401 }
2402
2403 self.start_transaction();
2404 self.pending_autoindent.take();
2405 let autoindent_request = autoindent_mode
2406 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2407
2408 let edit_operation = self.text.edit(edits.iter().cloned());
2409 let edit_id = edit_operation.timestamp();
2410
2411 if let Some((before_edit, mode)) = autoindent_request {
2412 let mut delta = 0isize;
2413 let mut previous_setting = None;
2414 let entries: Vec<_> = edits
2415 .into_iter()
2416 .enumerate()
2417 .zip(&edit_operation.as_edit().unwrap().new_text)
2418 .filter(|((_, (range, _)), _)| {
2419 let language = before_edit.language_at(range.start);
2420 let language_id = language.map(|l| l.id());
2421 if let Some((cached_language_id, auto_indent)) = previous_setting
2422 && cached_language_id == language_id
2423 {
2424 auto_indent
2425 } else {
2426 // The auto-indent setting is not present in editorconfigs, hence
2427 // we can avoid passing the file here.
2428 let auto_indent =
2429 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2430 previous_setting = Some((language_id, auto_indent));
2431 auto_indent
2432 }
2433 })
2434 .map(|((ix, (range, _)), new_text)| {
2435 let new_text_length = new_text.len();
2436 let old_start = range.start.to_point(&before_edit);
2437 let new_start = (delta + range.start as isize) as usize;
2438 let range_len = range.end - range.start;
2439 delta += new_text_length as isize - range_len as isize;
2440
2441 // Decide what range of the insertion to auto-indent, and whether
2442 // the first line of the insertion should be considered a newly-inserted line
2443 // or an edit to an existing line.
2444 let mut range_of_insertion_to_indent = 0..new_text_length;
2445 let mut first_line_is_new = true;
2446
2447 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2448 let old_line_end = before_edit.line_len(old_start.row);
2449
2450 if old_start.column > old_line_start {
2451 first_line_is_new = false;
2452 }
2453
2454 if !new_text.contains('\n')
2455 && (old_start.column + (range_len as u32) < old_line_end
2456 || old_line_end == old_line_start)
2457 {
2458 first_line_is_new = false;
2459 }
2460
2461 // When inserting text starting with a newline, avoid auto-indenting the
2462 // previous line.
2463 if new_text.starts_with('\n') {
2464 range_of_insertion_to_indent.start += 1;
2465 first_line_is_new = true;
2466 }
2467
2468 let mut original_indent_column = None;
2469 if let AutoindentMode::Block {
2470 original_indent_columns,
2471 } = &mode
2472 {
2473 original_indent_column = Some(if new_text.starts_with('\n') {
2474 indent_size_for_text(
2475 new_text[range_of_insertion_to_indent.clone()].chars(),
2476 )
2477 .len
2478 } else {
2479 original_indent_columns
2480 .get(ix)
2481 .copied()
2482 .flatten()
2483 .unwrap_or_else(|| {
2484 indent_size_for_text(
2485 new_text[range_of_insertion_to_indent.clone()].chars(),
2486 )
2487 .len
2488 })
2489 });
2490
2491 // Avoid auto-indenting the line after the edit.
2492 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2493 range_of_insertion_to_indent.end -= 1;
2494 }
2495 }
2496
2497 AutoindentRequestEntry {
2498 first_line_is_new,
2499 original_indent_column,
2500 indent_size: before_edit.language_indent_size_at(range.start, cx),
2501 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2502 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2503 }
2504 })
2505 .collect();
2506
2507 if !entries.is_empty() {
2508 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2509 before_edit,
2510 entries,
2511 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2512 ignore_empty_lines: false,
2513 }));
2514 }
2515 }
2516
2517 self.end_transaction(cx);
2518 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2519 Some(edit_id)
2520 }
2521
2522 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2523 self.was_changed();
2524
2525 if self.edits_since::<usize>(old_version).next().is_none() {
2526 return;
2527 }
2528
2529 self.reparse(cx);
2530 cx.emit(BufferEvent::Edited);
2531 if was_dirty != self.is_dirty() {
2532 cx.emit(BufferEvent::DirtyChanged);
2533 }
2534 cx.notify();
2535 }
2536
2537 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2538 where
2539 I: IntoIterator<Item = Range<T>>,
2540 T: ToOffset + Copy,
2541 {
2542 let before_edit = self.snapshot();
2543 let entries = ranges
2544 .into_iter()
2545 .map(|range| AutoindentRequestEntry {
2546 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2547 first_line_is_new: true,
2548 indent_size: before_edit.language_indent_size_at(range.start, cx),
2549 original_indent_column: None,
2550 })
2551 .collect();
2552 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2553 before_edit,
2554 entries,
2555 is_block_mode: false,
2556 ignore_empty_lines: true,
2557 }));
2558 self.request_autoindent(cx);
2559 }
2560
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2563 pub fn insert_empty_line(
2564 &mut self,
2565 position: impl ToPoint,
2566 space_above: bool,
2567 space_below: bool,
2568 cx: &mut Context<Self>,
2569 ) -> Point {
2570 let mut position = position.to_point(self);
2571
2572 self.start_transaction();
2573
2574 self.edit(
2575 [(position..position, "\n")],
2576 Some(AutoindentMode::EachLine),
2577 cx,
2578 );
2579
2580 if position.column > 0 {
2581 position += Point::new(1, 0);
2582 }
2583
2584 if !self.is_line_blank(position.row) {
2585 self.edit(
2586 [(position..position, "\n")],
2587 Some(AutoindentMode::EachLine),
2588 cx,
2589 );
2590 }
2591
2592 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2593 self.edit(
2594 [(position..position, "\n")],
2595 Some(AutoindentMode::EachLine),
2596 cx,
2597 );
2598 position.row += 1;
2599 }
2600
2601 if space_below
2602 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2603 {
2604 self.edit(
2605 [(position..position, "\n")],
2606 Some(AutoindentMode::EachLine),
2607 cx,
2608 );
2609 }
2610
2611 self.end_transaction(cx);
2612
2613 position
2614 }
2615
2616 /// Applies the given remote operations to the buffer.
2617 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2618 self.pending_autoindent.take();
2619 let was_dirty = self.is_dirty();
2620 let old_version = self.version.clone();
2621 let mut deferred_ops = Vec::new();
2622 let buffer_ops = ops
2623 .into_iter()
2624 .filter_map(|op| match op {
2625 Operation::Buffer(op) => Some(op),
2626 _ => {
2627 if self.can_apply_op(&op) {
2628 self.apply_op(op, cx);
2629 } else {
2630 deferred_ops.push(op);
2631 }
2632 None
2633 }
2634 })
2635 .collect::<Vec<_>>();
2636 for operation in buffer_ops.iter() {
2637 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2638 }
2639 self.text.apply_ops(buffer_ops);
2640 self.deferred_ops.insert(deferred_ops);
2641 self.flush_deferred_ops(cx);
2642 self.did_edit(&old_version, was_dirty, cx);
2643 // Notify independently of whether the buffer was edited as the operations could include a
2644 // selection update.
2645 cx.notify();
2646 }
2647
2648 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2649 let mut deferred_ops = Vec::new();
2650 for op in self.deferred_ops.drain().iter().cloned() {
2651 if self.can_apply_op(&op) {
2652 self.apply_op(op, cx);
2653 } else {
2654 deferred_ops.push(op);
2655 }
2656 }
2657 self.deferred_ops.insert(deferred_ops);
2658 }
2659
2660 pub fn has_deferred_ops(&self) -> bool {
2661 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2662 }
2663
2664 fn can_apply_op(&self, operation: &Operation) -> bool {
2665 match operation {
2666 Operation::Buffer(_) => {
2667 unreachable!("buffer operations should never be applied at this layer")
2668 }
2669 Operation::UpdateDiagnostics {
2670 diagnostics: diagnostic_set,
2671 ..
2672 } => diagnostic_set.iter().all(|diagnostic| {
2673 self.text.can_resolve(&diagnostic.range.start)
2674 && self.text.can_resolve(&diagnostic.range.end)
2675 }),
2676 Operation::UpdateSelections { selections, .. } => selections
2677 .iter()
2678 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2679 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2680 }
2681 }
2682
2683 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2684 match operation {
2685 Operation::Buffer(_) => {
2686 unreachable!("buffer operations should never be applied at this layer")
2687 }
2688 Operation::UpdateDiagnostics {
2689 server_id,
2690 diagnostics: diagnostic_set,
2691 lamport_timestamp,
2692 } => {
2693 let snapshot = self.snapshot();
2694 self.apply_diagnostic_update(
2695 server_id,
2696 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2697 lamport_timestamp,
2698 cx,
2699 );
2700 }
2701 Operation::UpdateSelections {
2702 selections,
2703 lamport_timestamp,
2704 line_mode,
2705 cursor_shape,
2706 } => {
2707 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2708 && set.lamport_timestamp > lamport_timestamp
2709 {
2710 return;
2711 }
2712
2713 self.remote_selections.insert(
2714 lamport_timestamp.replica_id,
2715 SelectionSet {
2716 selections,
2717 lamport_timestamp,
2718 line_mode,
2719 cursor_shape,
2720 },
2721 );
2722 self.text.lamport_clock.observe(lamport_timestamp);
2723 self.non_text_state_update_count += 1;
2724 }
2725 Operation::UpdateCompletionTriggers {
2726 triggers,
2727 lamport_timestamp,
2728 server_id,
2729 } => {
2730 if triggers.is_empty() {
2731 self.completion_triggers_per_language_server
2732 .remove(&server_id);
2733 self.completion_triggers = self
2734 .completion_triggers_per_language_server
2735 .values()
2736 .flat_map(|triggers| triggers.iter().cloned())
2737 .collect();
2738 } else {
2739 self.completion_triggers_per_language_server
2740 .insert(server_id, triggers.iter().cloned().collect());
2741 self.completion_triggers.extend(triggers);
2742 }
2743 self.text.lamport_clock.observe(lamport_timestamp);
2744 }
2745 Operation::UpdateLineEnding {
2746 line_ending,
2747 lamport_timestamp,
2748 } => {
2749 self.text.set_line_ending(line_ending);
2750 self.text.lamport_clock.observe(lamport_timestamp);
2751 }
2752 }
2753 }
2754
2755 fn apply_diagnostic_update(
2756 &mut self,
2757 server_id: LanguageServerId,
2758 diagnostics: DiagnosticSet,
2759 lamport_timestamp: clock::Lamport,
2760 cx: &mut Context<Self>,
2761 ) {
2762 if lamport_timestamp > self.diagnostics_timestamp {
2763 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2764 if diagnostics.is_empty() {
2765 if let Ok(ix) = ix {
2766 self.diagnostics.remove(ix);
2767 }
2768 } else {
2769 match ix {
2770 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2771 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2772 };
2773 }
2774 self.diagnostics_timestamp = lamport_timestamp;
2775 self.non_text_state_update_count += 1;
2776 self.text.lamport_clock.observe(lamport_timestamp);
2777 cx.notify();
2778 cx.emit(BufferEvent::DiagnosticsUpdated);
2779 }
2780 }
2781
2782 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2783 self.was_changed();
2784 cx.emit(BufferEvent::Operation {
2785 operation,
2786 is_local,
2787 });
2788 }
2789
2790 /// Removes the selections for a given peer.
2791 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2792 self.remote_selections.remove(&replica_id);
2793 cx.notify();
2794 }
2795
2796 /// Undoes the most recent transaction.
2797 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2798 let was_dirty = self.is_dirty();
2799 let old_version = self.version.clone();
2800
2801 if let Some((transaction_id, operation)) = self.text.undo() {
2802 self.send_operation(Operation::Buffer(operation), true, cx);
2803 self.did_edit(&old_version, was_dirty, cx);
2804 Some(transaction_id)
2805 } else {
2806 None
2807 }
2808 }
2809
2810 /// Manually undoes a specific transaction in the buffer's undo history.
2811 pub fn undo_transaction(
2812 &mut self,
2813 transaction_id: TransactionId,
2814 cx: &mut Context<Self>,
2815 ) -> bool {
2816 let was_dirty = self.is_dirty();
2817 let old_version = self.version.clone();
2818 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2819 self.send_operation(Operation::Buffer(operation), true, cx);
2820 self.did_edit(&old_version, was_dirty, cx);
2821 true
2822 } else {
2823 false
2824 }
2825 }
2826
2827 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2828 pub fn undo_to_transaction(
2829 &mut self,
2830 transaction_id: TransactionId,
2831 cx: &mut Context<Self>,
2832 ) -> bool {
2833 let was_dirty = self.is_dirty();
2834 let old_version = self.version.clone();
2835
2836 let operations = self.text.undo_to_transaction(transaction_id);
2837 let undone = !operations.is_empty();
2838 for operation in operations {
2839 self.send_operation(Operation::Buffer(operation), true, cx);
2840 }
2841 if undone {
2842 self.did_edit(&old_version, was_dirty, cx)
2843 }
2844 undone
2845 }
2846
2847 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2848 let was_dirty = self.is_dirty();
2849 let operation = self.text.undo_operations(counts);
2850 let old_version = self.version.clone();
2851 self.send_operation(Operation::Buffer(operation), true, cx);
2852 self.did_edit(&old_version, was_dirty, cx);
2853 }
2854
    /// Redoes the most recently undone transaction.
2856 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2857 let was_dirty = self.is_dirty();
2858 let old_version = self.version.clone();
2859
2860 if let Some((transaction_id, operation)) = self.text.redo() {
2861 self.send_operation(Operation::Buffer(operation), true, cx);
2862 self.did_edit(&old_version, was_dirty, cx);
2863 Some(transaction_id)
2864 } else {
2865 None
2866 }
2867 }
2868
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2870 pub fn redo_to_transaction(
2871 &mut self,
2872 transaction_id: TransactionId,
2873 cx: &mut Context<Self>,
2874 ) -> bool {
2875 let was_dirty = self.is_dirty();
2876 let old_version = self.version.clone();
2877
2878 let operations = self.text.redo_to_transaction(transaction_id);
2879 let redone = !operations.is_empty();
2880 for operation in operations {
2881 self.send_operation(Operation::Buffer(operation), true, cx);
2882 }
2883 if redone {
2884 self.did_edit(&old_version, was_dirty, cx)
2885 }
2886 redone
2887 }
2888
2889 /// Override current completion triggers with the user-provided completion triggers.
2890 pub fn set_completion_triggers(
2891 &mut self,
2892 server_id: LanguageServerId,
2893 triggers: BTreeSet<String>,
2894 cx: &mut Context<Self>,
2895 ) {
2896 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2897 if triggers.is_empty() {
2898 self.completion_triggers_per_language_server
2899 .remove(&server_id);
2900 self.completion_triggers = self
2901 .completion_triggers_per_language_server
2902 .values()
2903 .flat_map(|triggers| triggers.iter().cloned())
2904 .collect();
2905 } else {
2906 self.completion_triggers_per_language_server
2907 .insert(server_id, triggers.clone());
2908 self.completion_triggers.extend(triggers.iter().cloned());
2909 }
2910 self.send_operation(
2911 Operation::UpdateCompletionTriggers {
2912 triggers: triggers.into_iter().collect(),
2913 lamport_timestamp: self.completion_triggers_timestamp,
2914 server_id,
2915 },
2916 true,
2917 cx,
2918 );
2919 cx.notify();
2920 }
2921
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2924 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2925 &self.completion_triggers
2926 }
2927
2928 /// Call this directly after performing edits to prevent the preview tab
2929 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2930 /// to return false until there are additional edits.
2931 pub fn refresh_preview(&mut self) {
2932 self.preview_version = self.version.clone();
2933 }
2934
2935 /// Whether we should preserve the preview status of a tab containing this buffer.
2936 pub fn preserve_preview(&self) -> bool {
2937 !self.has_edits_since(&self.preview_version)
2938 }
2939}
2940
2941#[doc(hidden)]
2942#[cfg(any(test, feature = "test-support"))]
2943impl Buffer {
2944 pub fn edit_via_marked_text(
2945 &mut self,
2946 marked_string: &str,
2947 autoindent_mode: Option<AutoindentMode>,
2948 cx: &mut Context<Self>,
2949 ) {
2950 let edits = self.edits_for_marked_text(marked_string);
2951 self.edit(edits, autoindent_mode, cx);
2952 }
2953
2954 pub fn set_group_interval(&mut self, group_interval: Duration) {
2955 self.text.set_group_interval(group_interval);
2956 }
2957
2958 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2959 where
2960 T: rand::Rng,
2961 {
2962 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2963 let mut last_end = None;
2964 for _ in 0..old_range_count {
2965 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2966 break;
2967 }
2968
2969 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2970 let mut range = self.random_byte_range(new_start, rng);
2971 if rng.random_bool(0.2) {
2972 mem::swap(&mut range.start, &mut range.end);
2973 }
2974 last_end = Some(range.end);
2975
2976 let new_text_len = rng.random_range(0..10);
2977 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2978 new_text = new_text.to_uppercase();
2979
2980 edits.push((range, new_text));
2981 }
2982 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2983 self.edit(edits, None, cx);
2984 }
2985
2986 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2987 let was_dirty = self.is_dirty();
2988 let old_version = self.version.clone();
2989
2990 let ops = self.text.randomly_undo_redo(rng);
2991 if !ops.is_empty() {
2992 for op in ops {
2993 self.send_operation(Operation::Buffer(op), true, cx);
2994 self.did_edit(&old_version, was_dirty, cx);
2995 }
2996 }
2997 }
2998}
2999
3000impl EventEmitter<BufferEvent> for Buffer {}
3001
3002impl Deref for Buffer {
3003 type Target = TextBuffer;
3004
3005 fn deref(&self) -> &Self::Target {
3006 &self.text
3007 }
3008}
3009
3010impl BufferSnapshot {
3011 /// Returns [`IndentSize`] for a given line that respects user settings and
3012 /// language preferences.
3013 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3014 indent_size_for_line(self, row)
3015 }
3016
3017 /// Returns [`IndentSize`] for a given position that respects user settings
3018 /// and language preferences.
3019 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3020 let settings = language_settings(
3021 self.language_at(position).map(|l| l.name()),
3022 self.file(),
3023 cx,
3024 );
3025 if settings.hard_tabs {
3026 IndentSize::tab()
3027 } else {
3028 IndentSize::spaces(settings.tab_size.get())
3029 }
3030 }
3031
3032 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3033 /// is passed in as `single_indent_size`.
3034 pub fn suggested_indents(
3035 &self,
3036 rows: impl Iterator<Item = u32>,
3037 single_indent_size: IndentSize,
3038 ) -> BTreeMap<u32, IndentSize> {
3039 let mut result = BTreeMap::new();
3040
3041 for row_range in contiguous_ranges(rows, 10) {
3042 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3043 Some(suggestions) => suggestions,
3044 _ => break,
3045 };
3046
3047 for (row, suggestion) in row_range.zip(suggestions) {
3048 let indent_size = if let Some(suggestion) = suggestion {
3049 result
3050 .get(&suggestion.basis_row)
3051 .copied()
3052 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3053 .with_delta(suggestion.delta, single_indent_size)
3054 } else {
3055 self.indent_size_for_line(row)
3056 };
3057
3058 result.insert(row, indent_size);
3059 }
3060 }
3061
3062 result
3063 }
3064
3065 fn suggest_autoindents(
3066 &self,
3067 row_range: Range<u32>,
3068 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3069 let config = &self.language.as_ref()?.config;
3070 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3071
3072 #[derive(Debug, Clone)]
3073 struct StartPosition {
3074 start: Point,
3075 suffix: SharedString,
3076 }
3077
3078 // Find the suggested indentation ranges based on the syntax tree.
3079 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3080 let end = Point::new(row_range.end, 0);
3081 let range = (start..end).to_offset(&self.text);
3082 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3083 Some(&grammar.indents_config.as_ref()?.query)
3084 });
3085 let indent_configs = matches
3086 .grammars()
3087 .iter()
3088 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3089 .collect::<Vec<_>>();
3090
3091 let mut indent_ranges = Vec::<Range<Point>>::new();
3092 let mut start_positions = Vec::<StartPosition>::new();
3093 let mut outdent_positions = Vec::<Point>::new();
3094 while let Some(mat) = matches.peek() {
3095 let mut start: Option<Point> = None;
3096 let mut end: Option<Point> = None;
3097
3098 let config = indent_configs[mat.grammar_index];
3099 for capture in mat.captures {
3100 if capture.index == config.indent_capture_ix {
3101 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3102 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3103 } else if Some(capture.index) == config.start_capture_ix {
3104 start = Some(Point::from_ts_point(capture.node.end_position()));
3105 } else if Some(capture.index) == config.end_capture_ix {
3106 end = Some(Point::from_ts_point(capture.node.start_position()));
3107 } else if Some(capture.index) == config.outdent_capture_ix {
3108 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3109 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3110 start_positions.push(StartPosition {
3111 start: Point::from_ts_point(capture.node.start_position()),
3112 suffix: suffix.clone(),
3113 });
3114 }
3115 }
3116
3117 matches.advance();
3118 if let Some((start, end)) = start.zip(end) {
3119 if start.row == end.row {
3120 continue;
3121 }
3122 let range = start..end;
3123 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3124 Err(ix) => indent_ranges.insert(ix, range),
3125 Ok(ix) => {
3126 let prev_range = &mut indent_ranges[ix];
3127 prev_range.end = prev_range.end.max(range.end);
3128 }
3129 }
3130 }
3131 }
3132
3133 let mut error_ranges = Vec::<Range<Point>>::new();
3134 let mut matches = self
3135 .syntax
3136 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3137 while let Some(mat) = matches.peek() {
3138 let node = mat.captures[0].node;
3139 let start = Point::from_ts_point(node.start_position());
3140 let end = Point::from_ts_point(node.end_position());
3141 let range = start..end;
3142 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3143 Ok(ix) | Err(ix) => ix,
3144 };
3145 let mut end_ix = ix;
3146 while let Some(existing_range) = error_ranges.get(end_ix) {
3147 if existing_range.end < end {
3148 end_ix += 1;
3149 } else {
3150 break;
3151 }
3152 }
3153 error_ranges.splice(ix..end_ix, [range]);
3154 matches.advance();
3155 }
3156
3157 outdent_positions.sort();
3158 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3161 if let Some(range_to_truncate) = indent_ranges
3162 .iter_mut()
3163 .filter(|indent_range| indent_range.contains(&outdent_position))
3164 .next_back()
3165 {
3166 range_to_truncate.end = outdent_position;
3167 }
3168 }
3169
3170 start_positions.sort_by_key(|b| b.start);
3171
        // Find the suggested indentation increases and decreases based on regexes.
3173 let mut regex_outdent_map = HashMap::default();
3174 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3175 let mut start_positions_iter = start_positions.iter().peekable();
3176
3177 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3178 self.for_each_line(
3179 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3180 ..Point::new(row_range.end, 0),
3181 |row, line| {
3182 if config
3183 .decrease_indent_pattern
3184 .as_ref()
3185 .is_some_and(|regex| regex.is_match(line))
3186 {
3187 indent_change_rows.push((row, Ordering::Less));
3188 }
3189 if config
3190 .increase_indent_pattern
3191 .as_ref()
3192 .is_some_and(|regex| regex.is_match(line))
3193 {
3194 indent_change_rows.push((row + 1, Ordering::Greater));
3195 }
3196 while let Some(pos) = start_positions_iter.peek() {
3197 if pos.start.row < row {
3198 let pos = start_positions_iter.next().unwrap();
3199 last_seen_suffix
3200 .entry(pos.suffix.to_string())
3201 .or_default()
3202 .push(pos.start);
3203 } else {
3204 break;
3205 }
3206 }
3207 for rule in &config.decrease_indent_patterns {
3208 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3209 let row_start_column = self.indent_size_for_line(row).len;
3210 let basis_row = rule
3211 .valid_after
3212 .iter()
3213 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3214 .flatten()
3215 .filter(|start_point| start_point.column <= row_start_column)
3216 .max_by_key(|start_point| start_point.row);
3217 if let Some(outdent_to_row) = basis_row {
3218 regex_outdent_map.insert(row, outdent_to_row.row);
3219 }
3220 break;
3221 }
3222 }
3223 },
3224 );
3225
3226 let mut indent_changes = indent_change_rows.into_iter().peekable();
3227 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3228 prev_non_blank_row.unwrap_or(0)
3229 } else {
3230 row_range.start.saturating_sub(1)
3231 };
3232
3233 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3234 Some(row_range.map(move |row| {
3235 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3236
3237 let mut indent_from_prev_row = false;
3238 let mut outdent_from_prev_row = false;
3239 let mut outdent_to_row = u32::MAX;
3240 let mut from_regex = false;
3241
3242 while let Some((indent_row, delta)) = indent_changes.peek() {
3243 match indent_row.cmp(&row) {
3244 Ordering::Equal => match delta {
3245 Ordering::Less => {
3246 from_regex = true;
3247 outdent_from_prev_row = true
3248 }
3249 Ordering::Greater => {
3250 indent_from_prev_row = true;
3251 from_regex = true
3252 }
3253 _ => {}
3254 },
3255
3256 Ordering::Greater => break,
3257 Ordering::Less => {}
3258 }
3259
3260 indent_changes.next();
3261 }
3262
3263 for range in &indent_ranges {
3264 if range.start.row >= row {
3265 break;
3266 }
3267 if range.start.row == prev_row && range.end > row_start {
3268 indent_from_prev_row = true;
3269 }
3270 if range.end > prev_row_start && range.end <= row_start {
3271 outdent_to_row = outdent_to_row.min(range.start.row);
3272 }
3273 }
3274
3275 if let Some(basis_row) = regex_outdent_map.get(&row) {
3276 indent_from_prev_row = false;
3277 outdent_to_row = *basis_row;
3278 from_regex = true;
3279 }
3280
3281 let within_error = error_ranges
3282 .iter()
3283 .any(|e| e.start.row < row && e.end > row_start);
3284
3285 let suggestion = if outdent_to_row == prev_row
3286 || (outdent_from_prev_row && indent_from_prev_row)
3287 {
3288 Some(IndentSuggestion {
3289 basis_row: prev_row,
3290 delta: Ordering::Equal,
3291 within_error: within_error && !from_regex,
3292 })
3293 } else if indent_from_prev_row {
3294 Some(IndentSuggestion {
3295 basis_row: prev_row,
3296 delta: Ordering::Greater,
3297 within_error: within_error && !from_regex,
3298 })
3299 } else if outdent_to_row < prev_row {
3300 Some(IndentSuggestion {
3301 basis_row: outdent_to_row,
3302 delta: Ordering::Equal,
3303 within_error: within_error && !from_regex,
3304 })
3305 } else if outdent_from_prev_row {
3306 Some(IndentSuggestion {
3307 basis_row: prev_row,
3308 delta: Ordering::Less,
3309 within_error: within_error && !from_regex,
3310 })
3311 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3312 {
3313 Some(IndentSuggestion {
3314 basis_row: prev_row,
3315 delta: Ordering::Equal,
3316 within_error: within_error && !from_regex,
3317 })
3318 } else {
3319 None
3320 };
3321
3322 prev_row = row;
3323 prev_row_start = row_start;
3324 suggestion
3325 }))
3326 }
3327
3328 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3329 while row > 0 {
3330 row -= 1;
3331 if !self.is_line_blank(row) {
3332 return Some(row);
3333 }
3334 }
3335 None
3336 }
3337
3338 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3339 let captures = self.syntax.captures(range, &self.text, |grammar| {
3340 grammar
3341 .highlights_config
3342 .as_ref()
3343 .map(|config| &config.query)
3344 });
3345 let highlight_maps = captures
3346 .grammars()
3347 .iter()
3348 .map(|grammar| grammar.highlight_map())
3349 .collect();
3350 (captures, highlight_maps)
3351 }
3352
3353 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3354 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3355 /// returned in chunks where each chunk has a single syntax highlighting style and
3356 /// diagnostic status.
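    ///
    /// A rough sketch of reassembling plain text from the chunks (assuming
    /// `snapshot: &BufferSnapshot` and that each chunk exposes its `text`):
    ///
    /// ```ignore
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```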
3357 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3358 let range = range.start.to_offset(self)..range.end.to_offset(self);
3359
3360 let mut syntax = None;
3361 if language_aware {
3362 syntax = Some(self.get_highlights(range.clone()));
3363 }
3364 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3365 let diagnostics = language_aware;
3366 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3367 }
3368
3369 pub fn highlighted_text_for_range<T: ToOffset>(
3370 &self,
3371 range: Range<T>,
3372 override_style: Option<HighlightStyle>,
3373 syntax_theme: &SyntaxTheme,
3374 ) -> HighlightedText {
3375 HighlightedText::from_buffer_range(
3376 range,
3377 &self.text,
3378 &self.syntax,
3379 override_style,
3380 syntax_theme,
3381 )
3382 }
3383
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// A callback is used to avoid allocating a new string for each line.
3386 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3387 let mut line = String::new();
3388 let mut row = range.start.row;
3389 for chunk in self
3390 .as_rope()
3391 .chunks_in_range(range.to_offset(self))
3392 .chain(["\n"])
3393 {
3394 for (newline_ix, text) in chunk.split('\n').enumerate() {
3395 if newline_ix > 0 {
3396 callback(row, &line);
3397 row += 1;
3398 line.clear();
3399 }
3400 line.push_str(text);
3401 }
3402 }
3403 }
3404
3405 /// Iterates over every [`SyntaxLayer`] in the buffer.
3406 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3407 self.syntax_layers_for_range(0..self.len(), true)
3408 }
3409
3410 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3411 let offset = position.to_offset(self);
3412 self.syntax_layers_for_range(offset..offset, false)
3413 .filter(|l| {
3414 if let Some(ranges) = l.included_sub_ranges {
3415 ranges.iter().any(|range| {
3416 let start = range.start.to_offset(self);
3417 start <= offset && {
3418 let end = range.end.to_offset(self);
3419 offset < end
3420 }
3421 })
3422 } else {
3423 l.node().start_byte() <= offset && l.node().end_byte() > offset
3424 }
3425 })
3426 .last()
3427 }
3428
3429 pub fn syntax_layers_for_range<D: ToOffset>(
3430 &self,
3431 range: Range<D>,
3432 include_hidden: bool,
3433 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3434 self.syntax
3435 .layers_for_range(range, &self.text, include_hidden)
3436 }
3437
3438 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3439 &self,
3440 range: Range<D>,
3441 ) -> Option<SyntaxLayer<'_>> {
3442 let range = range.to_offset(self);
3443 self.syntax
3444 .layers_for_range(range, &self.text, false)
3445 .max_by(|a, b| {
3446 if a.depth != b.depth {
3447 a.depth.cmp(&b.depth)
3448 } else if a.offset.0 != b.offset.0 {
3449 a.offset.0.cmp(&b.offset.0)
3450 } else {
3451 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3452 }
3453 })
3454 }
3455
3456 /// Returns the main [`Language`].
3457 pub fn language(&self) -> Option<&Arc<Language>> {
3458 self.language.as_ref()
3459 }
3460
3461 /// Returns the [`Language`] at the given location.
3462 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3463 self.syntax_layer_at(position)
3464 .map(|info| info.language)
3465 .or(self.language.as_ref())
3466 }
3467
3468 /// Returns the settings for the language at the given location.
3469 pub fn settings_at<'a, D: ToOffset>(
3470 &'a self,
3471 position: D,
3472 cx: &'a App,
3473 ) -> Cow<'a, LanguageSettings> {
3474 language_settings(
3475 self.language_at(position).map(|l| l.name()),
3476 self.file.as_ref(),
3477 cx,
3478 )
3479 }
3480
3481 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3482 CharClassifier::new(self.language_scope_at(point))
3483 }
3484
3485 /// Returns the [`LanguageScope`] at the given location.
3486 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3487 let offset = position.to_offset(self);
3488 let mut scope = None;
3489 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3490
3491 // Use the layer that has the smallest node intersecting the given point.
3492 for layer in self
3493 .syntax
3494 .layers_for_range(offset..offset, &self.text, false)
3495 {
3496 let mut cursor = layer.node().walk();
3497
3498 let mut range = None;
3499 loop {
3500 let child_range = cursor.node().byte_range();
3501 if !child_range.contains(&offset) {
3502 break;
3503 }
3504
3505 range = Some(child_range);
3506 if cursor.goto_first_child_for_byte(offset).is_none() {
3507 break;
3508 }
3509 }
3510
3511 if let Some(range) = range
3512 && smallest_range_and_depth.as_ref().is_none_or(
3513 |(smallest_range, smallest_range_depth)| {
3514 if layer.depth > *smallest_range_depth {
3515 true
3516 } else if layer.depth == *smallest_range_depth {
3517 range.len() < smallest_range.len()
3518 } else {
3519 false
3520 }
3521 },
3522 )
3523 {
3524 smallest_range_and_depth = Some((range, layer.depth));
3525 scope = Some(LanguageScope {
3526 language: layer.language.clone(),
3527 override_id: layer.override_id(offset, &self.text),
3528 });
3529 }
3530 }
3531
3532 scope.or_else(|| {
3533 self.language.clone().map(|language| LanguageScope {
3534 language,
3535 override_id: None,
3536 })
3537 })
3538 }
3539
3540 /// Returns a tuple of the range and character kind of the word
3541 /// surrounding the given position.
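    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot` and a
    /// hypothetical byte `offset`):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(offset, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(word_range).collect();
    /// }
    /// ```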
3542 pub fn surrounding_word<T: ToOffset>(
3543 &self,
3544 start: T,
3545 scope_context: Option<CharScopeContext>,
3546 ) -> (Range<usize>, Option<CharKind>) {
3547 let mut start = start.to_offset(self);
3548 let mut end = start;
3549 let mut next_chars = self.chars_at(start).take(128).peekable();
3550 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3551
3552 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3553 let word_kind = cmp::max(
3554 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3555 next_chars.peek().copied().map(|c| classifier.kind(c)),
3556 );
3557
3558 for ch in prev_chars {
3559 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3560 start -= ch.len_utf8();
3561 } else {
3562 break;
3563 }
3564 }
3565
3566 for ch in next_chars {
3567 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3568 end += ch.len_utf8();
3569 } else {
3570 break;
3571 }
3572 }
3573
3574 (start..end, word_kind)
3575 }
3576
    /// Moves the `TreeCursor` to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns `true` if a node was found, and `false` otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
3582 fn goto_node_enclosing_range(
3583 cursor: &mut tree_sitter::TreeCursor,
3584 query_range: &Range<usize>,
3585 require_larger: bool,
3586 ) -> bool {
3587 let mut ascending = false;
3588 loop {
3589 let mut range = cursor.node().byte_range();
3590 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3593 if range.start > query_range.start {
3594 cursor.goto_previous_sibling();
3595 range = cursor.node().byte_range();
3596 }
3597 } else {
3598 // When the query range is non-empty and the current node ends exactly at the start,
3599 // move to the next sibling to find a node that extends beyond the start.
3600 if range.end == query_range.start {
3601 cursor.goto_next_sibling();
3602 range = cursor.node().byte_range();
3603 }
3604 }
3605
3606 let encloses = range.contains_inclusive(query_range)
3607 && (!require_larger || range.len() > query_range.len());
3608 if !encloses {
3609 ascending = true;
3610 if !cursor.goto_parent() {
3611 return false;
3612 }
3613 continue;
3614 } else if ascending {
3615 return true;
3616 }
3617
3618 // Descend into the current node.
3619 if cursor
3620 .goto_first_child_for_byte(query_range.start)
3621 .is_none()
3622 {
3623 return true;
3624 }
3625 }
3626 }
3627
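    /// Returns the smallest syntax node, across all syntax layers, that encloses the
    /// given range and is strictly larger than it.
    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot` and a
    /// hypothetical `selection_range: Range<usize>`), e.g. for expanding a selection
    /// outward to the nearest enclosing node:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection_range.clone()) {
    ///     let expanded = node.byte_range();
    ///     debug_assert!(expanded.start <= selection_range.start);
    ///     debug_assert!(selection_range.end <= expanded.end);
    /// }
    /// ```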
3628 pub fn syntax_ancestor<'a, T: ToOffset>(
3629 &'a self,
3630 range: Range<T>,
3631 ) -> Option<tree_sitter::Node<'a>> {
3632 let range = range.start.to_offset(self)..range.end.to_offset(self);
3633 let mut result: Option<tree_sitter::Node<'a>> = None;
3634 for layer in self
3635 .syntax
3636 .layers_for_range(range.clone(), &self.text, true)
3637 {
3638 let mut cursor = layer.node().walk();
3639
3640 // Find the node that both contains the range and is larger than it.
3641 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3642 continue;
3643 }
3644
3645 let left_node = cursor.node();
3646 let mut layer_result = left_node;
3647
3648 // For an empty range, try to find another node immediately to the right of the range.
3649 if left_node.end_byte() == range.start {
3650 let mut right_node = None;
3651 while !cursor.goto_next_sibling() {
3652 if !cursor.goto_parent() {
3653 break;
3654 }
3655 }
3656
3657 while cursor.node().start_byte() == range.start {
3658 right_node = Some(cursor.node());
3659 if !cursor.goto_first_child() {
3660 break;
3661 }
3662 }
3663
3664 // If there is a candidate node on both sides of the (empty) range, then
3665 // decide between the two by favoring a named node over an anonymous token.
3666 // If both nodes are the same in that regard, favor the right one.
3667 if let Some(right_node) = right_node
3668 && (right_node.is_named() || !left_node.is_named())
3669 {
3670 layer_result = right_node;
3671 }
3672 }
3673
3674 if let Some(previous_result) = &result
3675 && previous_result.byte_range().len() < layer_result.byte_range().len()
3676 {
3677 continue;
3678 }
3679 result = Some(layer_result);
3680 }
3681
3682 result
3683 }
3684
3685 /// Find the previous sibling syntax node at the given range.
3686 ///
3687 /// This function locates the syntax node that precedes the node containing
3688 /// the given range. It searches hierarchically by:
3689 /// 1. Finding the node that contains the given range
3690 /// 2. Looking for the previous sibling at the same tree level
3691 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3692 ///
3693 /// Returns `None` if there is no previous sibling at any ancestor level.
3694 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3695 &'a self,
3696 range: Range<T>,
3697 ) -> Option<tree_sitter::Node<'a>> {
3698 let range = range.start.to_offset(self)..range.end.to_offset(self);
3699 let mut result: Option<tree_sitter::Node<'a>> = None;
3700
3701 for layer in self
3702 .syntax
3703 .layers_for_range(range.clone(), &self.text, true)
3704 {
3705 let mut cursor = layer.node().walk();
3706
3707 // Find the node that contains the range
3708 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3709 continue;
3710 }
3711
3712 // Look for the previous sibling, moving up ancestor levels if needed
3713 loop {
3714 if cursor.goto_previous_sibling() {
3715 let layer_result = cursor.node();
3716
3717 if let Some(previous_result) = &result {
3718 if previous_result.byte_range().end < layer_result.byte_range().end {
3719 continue;
3720 }
3721 }
3722 result = Some(layer_result);
3723 break;
3724 }
3725
3726 // No sibling found at this level, try moving up to parent
3727 if !cursor.goto_parent() {
3728 break;
3729 }
3730 }
3731 }
3732
3733 result
3734 }
3735
3736 /// Find the next sibling syntax node at the given range.
3737 ///
3738 /// This function locates the syntax node that follows the node containing
3739 /// the given range. It searches hierarchically by:
3740 /// 1. Finding the node that contains the given range
3741 /// 2. Looking for the next sibling at the same tree level
3742 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3743 ///
3744 /// Returns `None` if there is no next sibling at any ancestor level.
3745 pub fn syntax_next_sibling<'a, T: ToOffset>(
3746 &'a self,
3747 range: Range<T>,
3748 ) -> Option<tree_sitter::Node<'a>> {
3749 let range = range.start.to_offset(self)..range.end.to_offset(self);
3750 let mut result: Option<tree_sitter::Node<'a>> = None;
3751
3752 for layer in self
3753 .syntax
3754 .layers_for_range(range.clone(), &self.text, true)
3755 {
3756 let mut cursor = layer.node().walk();
3757
3758 // Find the node that contains the range
3759 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3760 continue;
3761 }
3762
3763 // Look for the next sibling, moving up ancestor levels if needed
3764 loop {
3765 if cursor.goto_next_sibling() {
3766 let layer_result = cursor.node();
3767
3768 if let Some(previous_result) = &result {
3769 if previous_result.byte_range().start > layer_result.byte_range().start {
3770 continue;
3771 }
3772 }
3773 result = Some(layer_result);
3774 break;
3775 }
3776
3777 // No sibling found at this level, try moving up to parent
3778 if !cursor.goto_parent() {
3779 break;
3780 }
3781 }
3782 }
3783
3784 result
3785 }
3786
3787 /// Returns the root syntax node within the given row
3788 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3789 let start_offset = position.to_offset(self);
3790
3791 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3792
3793 let layer = self
3794 .syntax
3795 .layers_for_range(start_offset..start_offset, &self.text, true)
3796 .next()?;
3797
3798 let mut cursor = layer.node().walk();
3799
3800 // Descend to the first leaf that touches the start of the range.
3801 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3802 if cursor.node().end_byte() == start_offset {
3803 cursor.goto_next_sibling();
3804 }
3805 }
3806
3807 // Ascend to the root node within the same row.
3808 while cursor.goto_parent() {
3809 if cursor.node().start_position().row != row {
3810 break;
3811 }
3812 }
3813
3814 Some(cursor.node())
3815 }
3816
3817 /// Returns the outline for the buffer.
3818 ///
3819 /// This method allows passing an optional [`SyntaxTheme`] to
3820 /// syntax-highlight the returned symbols.
3821 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3822 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3823 }
3824
3825 /// Returns all the symbols that contain the given position.
3826 ///
3827 /// This method allows passing an optional [`SyntaxTheme`] to
3828 /// syntax-highlight the returned symbols.
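    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot` and a
    /// hypothetical `cursor_offset`), e.g. for rendering breadcrumbs:
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(cursor_offset, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```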
3829 pub fn symbols_containing<T: ToOffset>(
3830 &self,
3831 position: T,
3832 theme: Option<&SyntaxTheme>,
3833 ) -> Vec<OutlineItem<Anchor>> {
3834 let position = position.to_offset(self);
3835 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3836 let end = self.clip_offset(position + 1, Bias::Right);
3837 let mut items = self.outline_items_containing(start..end, false, theme);
3838 let mut prev_depth = None;
3839 items.retain(|item| {
3840 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3841 prev_depth = Some(item.depth);
3842 result
3843 });
3844 items
3845 }
3846
3847 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3848 let range = range.to_offset(self);
3849 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3850 grammar.outline_config.as_ref().map(|c| &c.query)
3851 });
3852 let configs = matches
3853 .grammars()
3854 .iter()
3855 .map(|g| g.outline_config.as_ref().unwrap())
3856 .collect::<Vec<_>>();
3857
3858 while let Some(mat) = matches.peek() {
3859 let config = &configs[mat.grammar_index];
3860 let containing_item_node = maybe!({
3861 let item_node = mat.captures.iter().find_map(|cap| {
3862 if cap.index == config.item_capture_ix {
3863 Some(cap.node)
3864 } else {
3865 None
3866 }
3867 })?;
3868
3869 let item_byte_range = item_node.byte_range();
3870 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3871 None
3872 } else {
3873 Some(item_node)
3874 }
3875 });
3876
3877 if let Some(item_node) = containing_item_node {
3878 return Some(
3879 Point::from_ts_point(item_node.start_position())
3880 ..Point::from_ts_point(item_node.end_position()),
3881 );
3882 }
3883
3884 matches.advance();
3885 }
3886 None
3887 }
3888
3889 pub fn outline_items_containing<T: ToOffset>(
3890 &self,
3891 range: Range<T>,
3892 include_extra_context: bool,
3893 theme: Option<&SyntaxTheme>,
3894 ) -> Vec<OutlineItem<Anchor>> {
3895 self.outline_items_containing_internal(
3896 range,
3897 include_extra_context,
3898 theme,
3899 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3900 )
3901 }
3902
3903 pub fn outline_items_as_points_containing<T: ToOffset>(
3904 &self,
3905 range: Range<T>,
3906 include_extra_context: bool,
3907 theme: Option<&SyntaxTheme>,
3908 ) -> Vec<OutlineItem<Point>> {
3909 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3910 range
3911 })
3912 }
3913
3914 fn outline_items_containing_internal<T: ToOffset, U>(
3915 &self,
3916 range: Range<T>,
3917 include_extra_context: bool,
3918 theme: Option<&SyntaxTheme>,
3919 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3920 ) -> Vec<OutlineItem<U>> {
3921 let range = range.to_offset(self);
3922 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3923 grammar.outline_config.as_ref().map(|c| &c.query)
3924 });
3925
3926 let mut items = Vec::new();
3927 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3928 while let Some(mat) = matches.peek() {
3929 let config = matches.grammars()[mat.grammar_index]
3930 .outline_config
3931 .as_ref()
3932 .unwrap();
3933 if let Some(item) =
3934 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3935 {
3936 items.push(item);
3937 } else if let Some(capture) = mat
3938 .captures
3939 .iter()
3940 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3941 {
3942 let capture_range = capture.node.start_position()..capture.node.end_position();
3943 let mut capture_row_range =
3944 capture_range.start.row as u32..capture_range.end.row as u32;
3945 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3946 {
3947 capture_row_range.end -= 1;
3948 }
3949 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3950 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3951 last_row_range.end = capture_row_range.end;
3952 } else {
3953 annotation_row_ranges.push(capture_row_range);
3954 }
3955 } else {
3956 annotation_row_ranges.push(capture_row_range);
3957 }
3958 }
3959 matches.advance();
3960 }
3961
3962 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3963
        // Assign depths based on containment relationships and convert ranges using the callback.
3965 let mut item_ends_stack = Vec::<Point>::new();
3966 let mut anchor_items = Vec::new();
3967 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3968 for item in items {
3969 while let Some(last_end) = item_ends_stack.last().copied() {
3970 if last_end < item.range.end {
3971 item_ends_stack.pop();
3972 } else {
3973 break;
3974 }
3975 }
3976
3977 let mut annotation_row_range = None;
3978 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3979 let row_preceding_item = item.range.start.row.saturating_sub(1);
3980 if next_annotation_row_range.end < row_preceding_item {
3981 annotation_row_ranges.next();
3982 } else {
3983 if next_annotation_row_range.end == row_preceding_item {
3984 annotation_row_range = Some(next_annotation_row_range.clone());
3985 annotation_row_ranges.next();
3986 }
3987 break;
3988 }
3989 }
3990
3991 anchor_items.push(OutlineItem {
3992 depth: item_ends_stack.len(),
3993 range: range_callback(self, item.range.clone()),
3994 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3995 text: item.text,
3996 highlight_ranges: item.highlight_ranges,
3997 name_ranges: item.name_ranges,
3998 body_range: item.body_range.map(|r| range_callback(self, r)),
3999 annotation_range: annotation_row_range.map(|annotation_range| {
4000 let point_range = Point::new(annotation_range.start, 0)
4001 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4002 range_callback(self, point_range)
4003 }),
4004 });
4005 item_ends_stack.push(item.range.end);
4006 }
4007
4008 anchor_items
4009 }
4010
4011 fn next_outline_item(
4012 &self,
4013 config: &OutlineConfig,
4014 mat: &SyntaxMapMatch,
4015 range: &Range<usize>,
4016 include_extra_context: bool,
4017 theme: Option<&SyntaxTheme>,
4018 ) -> Option<OutlineItem<Point>> {
4019 let item_node = mat.captures.iter().find_map(|cap| {
4020 if cap.index == config.item_capture_ix {
4021 Some(cap.node)
4022 } else {
4023 None
4024 }
4025 })?;
4026
4027 let item_byte_range = item_node.byte_range();
4028 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4029 return None;
4030 }
4031 let item_point_range = Point::from_ts_point(item_node.start_position())
4032 ..Point::from_ts_point(item_node.end_position());
4033
4034 let mut open_point = None;
4035 let mut close_point = None;
4036
4037 let mut buffer_ranges = Vec::new();
4038 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4039 let mut range = node.start_byte()..node.end_byte();
4040 let start = node.start_position();
4041 if node.end_position().row > start.row {
4042 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4043 }
4044
4045 if !range.is_empty() {
4046 buffer_ranges.push((range, node_is_name));
4047 }
4048 };
4049
4050 for capture in mat.captures {
4051 if capture.index == config.name_capture_ix {
4052 add_to_buffer_ranges(capture.node, true);
4053 } else if Some(capture.index) == config.context_capture_ix
4054 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4055 {
4056 add_to_buffer_ranges(capture.node, false);
4057 } else {
4058 if Some(capture.index) == config.open_capture_ix {
4059 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4060 } else if Some(capture.index) == config.close_capture_ix {
4061 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4062 }
4063 }
4064 }
4065
4066 if buffer_ranges.is_empty() {
4067 return None;
4068 }
4069 let source_range_for_text =
4070 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4071
4072 let mut text = String::new();
4073 let mut highlight_ranges = Vec::new();
4074 let mut name_ranges = Vec::new();
4075 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4076 let mut last_buffer_range_end = 0;
4077 for (buffer_range, is_name) in buffer_ranges {
4078 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4079 if space_added {
4080 text.push(' ');
4081 }
4082 let before_append_len = text.len();
4083 let mut offset = buffer_range.start;
4084 chunks.seek(buffer_range.clone());
4085 for mut chunk in chunks.by_ref() {
4086 if chunk.text.len() > buffer_range.end - offset {
4087 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4088 offset = buffer_range.end;
4089 } else {
4090 offset += chunk.text.len();
4091 }
4092 let style = chunk
4093 .syntax_highlight_id
4094 .zip(theme)
4095 .and_then(|(highlight, theme)| highlight.style(theme));
4096 if let Some(style) = style {
4097 let start = text.len();
4098 let end = start + chunk.text.len();
4099 highlight_ranges.push((start..end, style));
4100 }
4101 text.push_str(chunk.text);
4102 if offset >= buffer_range.end {
4103 break;
4104 }
4105 }
4106 if is_name {
4107 let after_append_len = text.len();
4108 let start = if space_added && !name_ranges.is_empty() {
4109 before_append_len - 1
4110 } else {
4111 before_append_len
4112 };
4113 name_ranges.push(start..after_append_len);
4114 }
4115 last_buffer_range_end = buffer_range.end;
4116 }
4117
4118 Some(OutlineItem {
4119 depth: 0, // We'll calculate the depth later
4120 range: item_point_range,
4121 source_range_for_text: source_range_for_text.to_point(self),
4122 text,
4123 highlight_ranges,
4124 name_ranges,
4125 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4126 annotation_range: None,
4127 })
4128 }
4129
4130 pub fn function_body_fold_ranges<T: ToOffset>(
4131 &self,
4132 within: Range<T>,
4133 ) -> impl Iterator<Item = Range<usize>> + '_ {
4134 self.text_object_ranges(within, TreeSitterOptions::default())
4135 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4136 }
4137
4138 /// For each grammar in the language, runs the provided
4139 /// [`tree_sitter::Query`] against the given range.
4140 pub fn matches(
4141 &self,
4142 range: Range<usize>,
4143 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4144 ) -> SyntaxMapMatches<'_> {
4145 self.syntax.matches(range, self, query)
4146 }
4147
4148 /// Returns all bracket pairs that intersect with the range given.
4149 ///
4150 /// The resulting collection is not ordered.
4151 fn fetch_bracket_ranges(&self, range: Range<usize>) -> Vec<BracketMatch> {
4152 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4153 let mut new_bracket_matches = HashMap::default();
4154 let mut all_bracket_matches = Vec::new();
4155 for chunk in tree_sitter_data
4156 .chunks
4157 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4158 {
4159 let chunk_brackets = tree_sitter_data.brackets_by_chunks.remove(chunk.id);
4160 let bracket_matches = match chunk_brackets {
4161 Some(cached_brackets) => cached_brackets,
4162 None => {
4163 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4164 grammar.brackets_config.as_ref().map(|c| &c.query)
4165 });
4166 let configs = matches
4167 .grammars()
4168 .iter()
4169 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4170 .collect::<Vec<_>>();
4171
4172 // todo!
4173 let mut depth = 0;
4174 let range = range.clone();
4175 let new_matches = iter::from_fn(move || {
4176 while let Some(mat) = matches.peek() {
4177 let mut open = None;
4178 let mut close = None;
4179 let config = configs[mat.grammar_index];
4180 let pattern = &config.patterns[mat.pattern_index];
4181 for capture in mat.captures {
4182 if capture.index == config.open_capture_ix {
4183 open = Some(capture.node.byte_range());
4184 } else if capture.index == config.close_capture_ix {
4185 close = Some(capture.node.byte_range());
4186 }
4187 }
4188
4189 matches.advance();
4190
4191 let Some((open_range, close_range)) = open.zip(close) else {
4192 continue;
4193 };
4194
4195 let bracket_range = open_range.start..=close_range.end;
4196 if !bracket_range.overlaps(&range) {
4197 continue;
4198 }
4199
4200 depth += 1;
4201
4202 return Some(BracketMatch {
4203 open_range,
4204 close_range,
4205 newline_only: pattern.newline_only,
4206 depth,
4207 });
4208 }
4209 None
4210 })
4211 .collect::<Vec<_>>();
4212
4213 new_bracket_matches.insert(chunk.id, new_matches.clone());
4214 new_matches
4215 }
4216 };
4217 all_bracket_matches.extend(bracket_matches);
4218 }
4219
4220 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4221 if latest_tree_sitter_data.chunks.version() == &self.version {
4222 for (chunk_id, new_matches) in new_bracket_matches {
4223 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4224 if old_chunks.is_none() {
4225 *old_chunks = Some(new_matches);
4226 }
4227 }
4228 }
4229
4230 all_bracket_matches
4231 }
4232
4233 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4234 let mut tree_sitter_data = self.tree_sitter_data.lock();
4235 if self
4236 .version
4237 .changed_since(tree_sitter_data.chunks.version())
4238 {
4239 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4240 }
4241 tree_sitter_data
4242 }
4243
4244 pub fn all_bracket_ranges(&self, range: Range<usize>) -> Vec<BracketMatch> {
4245 self.fetch_bracket_ranges(range)
4246 }
4247
4248 /// Returns bracket range pairs overlapping or adjacent to `range`
4249 pub fn bracket_ranges<T: ToOffset>(
4250 &self,
4251 range: Range<T>,
4252 ) -> impl Iterator<Item = BracketMatch> + '_ {
4253 // Find bracket pairs that *inclusively* contain the given range.
4254 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4255 self.all_bracket_ranges(range)
4256 .into_iter()
4257 .filter(|pair| !pair.newline_only)
4258 }
4259
4260 pub fn debug_variables_query<T: ToOffset>(
4261 &self,
4262 range: Range<T>,
4263 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4264 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4265
4266 let mut matches = self.syntax.matches_with_options(
4267 range.clone(),
4268 &self.text,
4269 TreeSitterOptions::default(),
4270 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4271 );
4272
4273 let configs = matches
4274 .grammars()
4275 .iter()
4276 .map(|grammar| grammar.debug_variables_config.as_ref())
4277 .collect::<Vec<_>>();
4278
4279 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4280
4281 iter::from_fn(move || {
4282 loop {
4283 while let Some(capture) = captures.pop() {
4284 if capture.0.overlaps(&range) {
4285 return Some(capture);
4286 }
4287 }
4288
4289 let mat = matches.peek()?;
4290
4291 let Some(config) = configs[mat.grammar_index].as_ref() else {
4292 matches.advance();
4293 continue;
4294 };
4295
4296 for capture in mat.captures {
4297 let Some(ix) = config
4298 .objects_by_capture_ix
4299 .binary_search_by_key(&capture.index, |e| e.0)
4300 .ok()
4301 else {
4302 continue;
4303 };
4304 let text_object = config.objects_by_capture_ix[ix].1;
4305 let byte_range = capture.node.byte_range();
4306
4307 let mut found = false;
4308 for (range, existing) in captures.iter_mut() {
4309 if existing == &text_object {
4310 range.start = range.start.min(byte_range.start);
4311 range.end = range.end.max(byte_range.end);
4312 found = true;
4313 break;
4314 }
4315 }
4316
4317 if !found {
4318 captures.push((byte_range, text_object));
4319 }
4320 }
4321
4322 matches.advance();
4323 }
4324 })
4325 }
4326
4327 pub fn text_object_ranges<T: ToOffset>(
4328 &self,
4329 range: Range<T>,
4330 options: TreeSitterOptions,
4331 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4332 let range =
4333 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4334
4335 let mut matches =
4336 self.syntax
4337 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4338 grammar.text_object_config.as_ref().map(|c| &c.query)
4339 });
4340
4341 let configs = matches
4342 .grammars()
4343 .iter()
4344 .map(|grammar| grammar.text_object_config.as_ref())
4345 .collect::<Vec<_>>();
4346
4347 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4348
4349 iter::from_fn(move || {
4350 loop {
4351 while let Some(capture) = captures.pop() {
4352 if capture.0.overlaps(&range) {
4353 return Some(capture);
4354 }
4355 }
4356
4357 let mat = matches.peek()?;
4358
4359 let Some(config) = configs[mat.grammar_index].as_ref() else {
4360 matches.advance();
4361 continue;
4362 };
4363
4364 for capture in mat.captures {
4365 let Some(ix) = config
4366 .text_objects_by_capture_ix
4367 .binary_search_by_key(&capture.index, |e| e.0)
4368 .ok()
4369 else {
4370 continue;
4371 };
4372 let text_object = config.text_objects_by_capture_ix[ix].1;
4373 let byte_range = capture.node.byte_range();
4374
4375 let mut found = false;
4376 for (range, existing) in captures.iter_mut() {
4377 if existing == &text_object {
4378 range.start = range.start.min(byte_range.start);
4379 range.end = range.end.max(byte_range.end);
4380 found = true;
4381 break;
4382 }
4383 }
4384
4385 if !found {
4386 captures.push((byte_range, text_object));
4387 }
4388 }
4389
4390 matches.advance();
4391 }
4392 })
4393 }
4394
4395 /// Returns enclosing bracket ranges containing the given range
4396 pub fn enclosing_bracket_ranges<T: ToOffset>(
4397 &self,
4398 range: Range<T>,
4399 ) -> impl Iterator<Item = BracketMatch> + '_ {
4400 let range = range.start.to_offset(self)..range.end.to_offset(self);
4401
4402 self.bracket_ranges(range.clone()).filter(move |pair| {
4403 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4404 })
4405 }
4406
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to filter which bracket ranges are considered.
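    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot`, a hypothetical
    /// `selection_range`, and an arbitrary filter that skips a pair opening at offset 0):
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.start != 0;
    /// if let Some((open, close)) = snapshot
    ///     .innermost_enclosing_bracket_ranges(selection_range.clone(), Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the two bracket tokens.
    /// }
    /// ```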
4410 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4411 &self,
4412 range: Range<T>,
4413 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4414 ) -> Option<(Range<usize>, Range<usize>)> {
4415 let range = range.start.to_offset(self)..range.end.to_offset(self);
4416
4417 // Get the ranges of the innermost pair of brackets.
4418 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4419
4420 for pair in self.enclosing_bracket_ranges(range) {
4421 if let Some(range_filter) = range_filter
4422 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4423 {
4424 continue;
4425 }
4426
4427 let len = pair.close_range.end - pair.open_range.start;
4428
4429 if let Some((existing_open, existing_close)) = &result {
4430 let existing_len = existing_close.end - existing_open.start;
4431 if len > existing_len {
4432 continue;
4433 }
4434 }
4435
4436 result = Some((pair.open_range, pair.close_range));
4437 }
4438
4439 result
4440 }
4441
    /// Returns offset ranges for any matches of the redaction query.
4443 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4444 /// will be run on the relevant section of the buffer.
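    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot`):
    ///
    /// ```ignore
    /// // Byte ranges that a caller might render obscured, e.g. values in a `.env` file.
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```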
4445 pub fn redacted_ranges<T: ToOffset>(
4446 &self,
4447 range: Range<T>,
4448 ) -> impl Iterator<Item = Range<usize>> + '_ {
4449 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4450 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4451 grammar
4452 .redactions_config
4453 .as_ref()
4454 .map(|config| &config.query)
4455 });
4456
4457 let configs = syntax_matches
4458 .grammars()
4459 .iter()
4460 .map(|grammar| grammar.redactions_config.as_ref())
4461 .collect::<Vec<_>>();
4462
4463 iter::from_fn(move || {
4464 let redacted_range = syntax_matches
4465 .peek()
4466 .and_then(|mat| {
4467 configs[mat.grammar_index].and_then(|config| {
4468 mat.captures
4469 .iter()
4470 .find(|capture| capture.index == config.redaction_capture_ix)
4471 })
4472 })
4473 .map(|mat| mat.node.byte_range());
4474 syntax_matches.advance();
4475 redacted_range
4476 })
4477 }
4478
4479 pub fn injections_intersecting_range<T: ToOffset>(
4480 &self,
4481 range: Range<T>,
4482 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4483 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4484
4485 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4486 grammar
4487 .injection_config
4488 .as_ref()
4489 .map(|config| &config.query)
4490 });
4491
4492 let configs = syntax_matches
4493 .grammars()
4494 .iter()
4495 .map(|grammar| grammar.injection_config.as_ref())
4496 .collect::<Vec<_>>();
4497
4498 iter::from_fn(move || {
4499 let ranges = syntax_matches.peek().and_then(|mat| {
4500 let config = &configs[mat.grammar_index]?;
4501 let content_capture_range = mat.captures.iter().find_map(|capture| {
4502 if capture.index == config.content_capture_ix {
4503 Some(capture.node.byte_range())
4504 } else {
4505 None
4506 }
4507 })?;
4508 let language = self.language_at(content_capture_range.start)?;
4509 Some((content_capture_range, language))
4510 });
4511 syntax_matches.advance();
4512 ranges
4513 })
4514 }
4515
4516 pub fn runnable_ranges(
4517 &self,
4518 offset_range: Range<usize>,
4519 ) -> impl Iterator<Item = RunnableRange> + '_ {
4520 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4521 grammar.runnable_config.as_ref().map(|config| &config.query)
4522 });
4523
4524 let test_configs = syntax_matches
4525 .grammars()
4526 .iter()
4527 .map(|grammar| grammar.runnable_config.as_ref())
4528 .collect::<Vec<_>>();
4529
4530 iter::from_fn(move || {
4531 loop {
4532 let mat = syntax_matches.peek()?;
4533
4534 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4535 let mut run_range = None;
4536 let full_range = mat.captures.iter().fold(
4537 Range {
4538 start: usize::MAX,
4539 end: 0,
4540 },
4541 |mut acc, next| {
4542 let byte_range = next.node.byte_range();
4543 if acc.start > byte_range.start {
4544 acc.start = byte_range.start;
4545 }
4546 if acc.end < byte_range.end {
4547 acc.end = byte_range.end;
4548 }
4549 acc
4550 },
4551 );
4552 if full_range.start > full_range.end {
                        // We did not find a range spanning the full match.
4554 return None;
4555 }
4556 let extra_captures: SmallVec<[_; 1]> =
4557 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4558 test_configs
4559 .extra_captures
4560 .get(capture.index as usize)
4561 .cloned()
4562 .and_then(|tag_name| match tag_name {
4563 RunnableCapture::Named(name) => {
4564 Some((capture.node.byte_range(), name))
4565 }
4566 RunnableCapture::Run => {
4567 let _ = run_range.insert(capture.node.byte_range());
4568 None
4569 }
4570 })
4571 }));
4572 let run_range = run_range?;
4573 let tags = test_configs
4574 .query
4575 .property_settings(mat.pattern_index)
4576 .iter()
4577 .filter_map(|property| {
4578 if *property.key == *"tag" {
4579 property
4580 .value
4581 .as_ref()
4582 .map(|value| RunnableTag(value.to_string().into()))
4583 } else {
4584 None
4585 }
4586 })
4587 .collect();
4588 let extra_captures = extra_captures
4589 .into_iter()
4590 .map(|(range, name)| {
4591 (
4592 name.to_string(),
4593 self.text_for_range(range).collect::<String>(),
4594 )
4595 })
4596 .collect();
4597 // All tags should have the same range.
4598 Some(RunnableRange {
4599 run_range,
4600 full_range,
4601 runnable: Runnable {
4602 tags,
4603 language: mat.language,
4604 buffer: self.remote_id(),
4605 },
4606 extra_captures,
4607 buffer_id: self.remote_id(),
4608 })
4609 });
4610
4611 syntax_matches.advance();
4612 if test_range.is_some() {
                    // It's fine to short-circuit when .peek()? returns None. We don't want this
                    // iterator to end just because a match lacked a run marker, so in that case we
                    // loop around to the next match.
4615 return test_range;
4616 }
4617 }
4618 })
4619 }
4620
4621 /// Returns selections for remote peers intersecting the given range.
4622 #[allow(clippy::type_complexity)]
4623 pub fn selections_in_range(
4624 &self,
4625 range: Range<Anchor>,
4626 include_local: bool,
4627 ) -> impl Iterator<
4628 Item = (
4629 ReplicaId,
4630 bool,
4631 CursorShape,
4632 impl Iterator<Item = &Selection<Anchor>> + '_,
4633 ),
4634 > + '_ {
4635 self.remote_selections
4636 .iter()
4637 .filter(move |(replica_id, set)| {
4638 (include_local || **replica_id != self.text.replica_id())
4639 && !set.selections.is_empty()
4640 })
4641 .map(move |(replica_id, set)| {
4642 let start_ix = match set.selections.binary_search_by(|probe| {
4643 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4644 }) {
4645 Ok(ix) | Err(ix) => ix,
4646 };
4647 let end_ix = match set.selections.binary_search_by(|probe| {
4648 probe.start.cmp(&range.end, self).then(Ordering::Less)
4649 }) {
4650 Ok(ix) | Err(ix) => ix,
4651 };
4652
4653 (
4654 *replica_id,
4655 set.line_mode,
4656 set.cursor_shape,
4657 set.selections[start_ix..end_ix].iter(),
4658 )
4659 })
4660 }
4661
    /// Returns whether the buffer contains any diagnostics.
4663 pub fn has_diagnostics(&self) -> bool {
4664 !self.diagnostics.is_empty()
4665 }
4666
4667 /// Returns all the diagnostics intersecting the given range.
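    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot` and a
    /// hypothetical `visible_range`), resolving the entries to byte offsets:
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(visible_range.clone(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```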
4668 pub fn diagnostics_in_range<'a, T, O>(
4669 &'a self,
4670 search_range: Range<T>,
4671 reversed: bool,
4672 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4673 where
4674 T: 'a + Clone + ToOffset,
4675 O: 'a + FromAnchor,
4676 {
4677 let mut iterators: Vec<_> = self
4678 .diagnostics
4679 .iter()
4680 .map(|(_, collection)| {
4681 collection
4682 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4683 .peekable()
4684 })
4685 .collect();
4686
4687 std::iter::from_fn(move || {
4688 let (next_ix, _) = iterators
4689 .iter_mut()
4690 .enumerate()
4691 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4692 .min_by(|(_, a), (_, b)| {
4693 let cmp = a
4694 .range
4695 .start
4696 .cmp(&b.range.start, self)
4697 // when range is equal, sort by diagnostic severity
4698 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4699 // and stabilize order with group_id
4700 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4701 if reversed { cmp.reverse() } else { cmp }
4702 })?;
4703 iterators[next_ix]
4704 .next()
4705 .map(
4706 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4707 diagnostic,
4708 range: FromAnchor::from_anchor(&range.start, self)
4709 ..FromAnchor::from_anchor(&range.end, self),
4710 },
4711 )
4712 })
4713 }
4714
4715 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4716 /// should be used instead.
4717 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4718 &self.diagnostics
4719 }
4720
4721 /// Returns all the diagnostic groups associated with the given
4722 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
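    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot`):
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```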
4724 pub fn diagnostic_groups(
4725 &self,
4726 language_server_id: Option<LanguageServerId>,
4727 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4728 let mut groups = Vec::new();
4729
4730 if let Some(language_server_id) = language_server_id {
4731 if let Ok(ix) = self
4732 .diagnostics
4733 .binary_search_by_key(&language_server_id, |e| e.0)
4734 {
4735 self.diagnostics[ix]
4736 .1
4737 .groups(language_server_id, &mut groups, self);
4738 }
4739 } else {
4740 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4741 diagnostics.groups(*language_server_id, &mut groups, self);
4742 }
4743 }
4744
4745 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4746 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4747 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4748 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4749 });
4750
4751 groups
4752 }
4753
4754 /// Returns an iterator over the diagnostics for the given group.
4755 pub fn diagnostic_group<O>(
4756 &self,
4757 group_id: usize,
4758 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4759 where
4760 O: FromAnchor + 'static,
4761 {
4762 self.diagnostics
4763 .iter()
4764 .flat_map(move |(_, set)| set.group(group_id, self))
4765 }
4766
4767 /// An integer version number that accounts for all updates besides
4768 /// the buffer's text itself (which is versioned via a version vector).
4769 pub fn non_text_state_update_count(&self) -> usize {
4770 self.non_text_state_update_count
4771 }
4772
4773 /// An integer version that changes when the buffer's syntax changes.
4774 pub fn syntax_update_count(&self) -> usize {
4775 self.syntax.update_count()
4776 }
4777
    /// Returns a snapshot of the underlying file.
4779 pub fn file(&self) -> Option<&Arc<dyn File>> {
4780 self.file.as_ref()
4781 }
4782
4783 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4784 if let Some(file) = self.file() {
4785 if file.path().file_name().is_none() || include_root {
4786 Some(file.full_path(cx).to_string_lossy().into_owned())
4787 } else {
4788 Some(file.path().display(file.path_style(cx)).to_string())
4789 }
4790 } else {
4791 None
4792 }
4793 }
4794
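    /// Collects the distinct words in the given buffer range, mapped to their anchor
    /// ranges, optionally filtered by [`WordsQuery::fuzzy_contents`].
    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```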
4795 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4796 let query_str = query.fuzzy_contents;
4797 if query_str.is_some_and(|query| query.is_empty()) {
4798 return BTreeMap::default();
4799 }
4800
4801 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4802 language,
4803 override_id: None,
4804 }));
4805
4806 let mut query_ix = 0;
4807 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4808 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4809
4810 let mut words = BTreeMap::default();
4811 let mut current_word_start_ix = None;
4812 let mut chunk_ix = query.range.start;
4813 for chunk in self.chunks(query.range, false) {
4814 for (i, c) in chunk.text.char_indices() {
4815 let ix = chunk_ix + i;
4816 if classifier.is_word(c) {
4817 if current_word_start_ix.is_none() {
4818 current_word_start_ix = Some(ix);
4819 }
4820
4821 if let Some(query_chars) = &query_chars
4822 && query_ix < query_len
4823 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4824 {
4825 query_ix += 1;
4826 }
4827 continue;
4828 } else if let Some(word_start) = current_word_start_ix.take()
4829 && query_ix == query_len
4830 {
4831 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4832 let mut word_text = self.text_for_range(word_start..ix).peekable();
4833 let first_char = word_text
4834 .peek()
4835 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty matches and "words"
                    // that start with a digit.
4837 if !query.skip_digits
4838 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4839 {
4840 words.insert(word_text.collect(), word_range);
4841 }
4842 }
4843 query_ix = 0;
4844 }
4845 chunk_ix += chunk.text.len();
4846 }
4847
4848 words
4849 }
4850}
4851
4852pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters in this fuzzy string, in order.
4854 pub fuzzy_contents: Option<&'a str>,
4855 /// Skips words that start with a digit.
4856 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4858 pub range: Range<usize>,
4859}
4860
4861fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4862 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4863}
4864
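/// Illustrative sketch (crate-internal; not compiled):
///
/// ```ignore
/// let indent = indent_size_for_text("\t\tlet x = 1;".chars());
/// assert_eq!(indent.kind, IndentKind::Tab);
/// assert_eq!(indent.len, 2);
/// ```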
4865fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4866 let mut result = IndentSize::spaces(0);
4867 for c in text {
4868 let kind = match c {
4869 ' ' => IndentKind::Space,
4870 '\t' => IndentKind::Tab,
4871 _ => break,
4872 };
4873 if result.len == 0 {
4874 result.kind = kind;
4875 }
4876 result.len += 1;
4877 }
4878 result
4879}
4880
4881impl Clone for BufferSnapshot {
4882 fn clone(&self) -> Self {
4883 Self {
4884 text: self.text.clone(),
4885 syntax: self.syntax.clone(),
4886 file: self.file.clone(),
4887 remote_selections: self.remote_selections.clone(),
4888 diagnostics: self.diagnostics.clone(),
4889 language: self.language.clone(),
4890 tree_sitter_data: self.tree_sitter_data.clone(),
4891 non_text_state_update_count: self.non_text_state_update_count,
4892 }
4893 }
4894}
4895
4896impl Deref for BufferSnapshot {
4897 type Target = text::BufferSnapshot;
4898
4899 fn deref(&self) -> &Self::Target {
4900 &self.text
4901 }
4902}
4903
4904unsafe impl Send for BufferChunks<'_> {}
4905
4906impl<'a> BufferChunks<'a> {
4907 pub(crate) fn new(
4908 text: &'a Rope,
4909 range: Range<usize>,
4910 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4911 diagnostics: bool,
4912 buffer_snapshot: Option<&'a BufferSnapshot>,
4913 ) -> Self {
4914 let mut highlights = None;
4915 if let Some((captures, highlight_maps)) = syntax {
4916 highlights = Some(BufferChunkHighlights {
4917 captures,
4918 next_capture: None,
4919 stack: Default::default(),
4920 highlight_maps,
4921 })
4922 }
4923
4924 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4925 let chunks = text.chunks_in_range(range.clone());
4926
4927 let mut this = BufferChunks {
4928 range,
4929 buffer_snapshot,
4930 chunks,
4931 diagnostic_endpoints,
4932 error_depth: 0,
4933 warning_depth: 0,
4934 information_depth: 0,
4935 hint_depth: 0,
4936 unnecessary_depth: 0,
4937 underline: true,
4938 highlights,
4939 };
4940 this.initialize_diagnostic_endpoints();
4941 this
4942 }
4943
    /// Seeks to the given byte range in the buffer.
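    ///
    /// Illustrative sketch (not compiled; assumes a populated `snapshot` and that the
    /// ranges lie within the buffer):
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// // Re-position the same iterator over a smaller sub-range without rebuilding it.
    /// chunks.seek(64..128);
    /// for chunk in chunks.by_ref() {
    ///     let _ = (chunk.text, chunk.syntax_highlight_id);
    /// }
    /// ```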
4945 pub fn seek(&mut self, range: Range<usize>) {
4946 let old_range = std::mem::replace(&mut self.range, range.clone());
4947 self.chunks.set_range(self.range.clone());
4948 if let Some(highlights) = self.highlights.as_mut() {
4949 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4950 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4951 highlights
4952 .stack
4953 .retain(|(end_offset, _)| *end_offset > range.start);
4954 if let Some(capture) = &highlights.next_capture
4955 && range.start >= capture.node.start_byte()
4956 {
4957 let next_capture_end = capture.node.end_byte();
4958 if range.start < next_capture_end {
4959 highlights.stack.push((
4960 next_capture_end,
4961 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4962 ));
4963 }
4964 highlights.next_capture.take();
4965 }
4966 } else if let Some(snapshot) = self.buffer_snapshot {
4967 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4968 *highlights = BufferChunkHighlights {
4969 captures,
4970 next_capture: None,
4971 stack: Default::default(),
4972 highlight_maps,
4973 };
4974 } else {
4975 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4976 // Seeking such BufferChunks is not supported.
4977 debug_assert!(
4978 false,
4979 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4980 );
4981 }
4982
4983 highlights.captures.set_byte_range(self.range.clone());
4984 self.initialize_diagnostic_endpoints();
4985 }
4986 }
4987
4988 fn initialize_diagnostic_endpoints(&mut self) {
4989 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
4990 && let Some(buffer) = self.buffer_snapshot
4991 {
4992 let mut diagnostic_endpoints = Vec::new();
4993 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4994 diagnostic_endpoints.push(DiagnosticEndpoint {
4995 offset: entry.range.start,
4996 is_start: true,
4997 severity: entry.diagnostic.severity,
4998 is_unnecessary: entry.diagnostic.is_unnecessary,
4999 underline: entry.diagnostic.underline,
5000 });
5001 diagnostic_endpoints.push(DiagnosticEndpoint {
5002 offset: entry.range.end,
5003 is_start: false,
5004 severity: entry.diagnostic.severity,
5005 is_unnecessary: entry.diagnostic.is_unnecessary,
5006 underline: entry.diagnostic.underline,
5007 });
5008 }
5009 diagnostic_endpoints
5010 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5011 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5012 self.hint_depth = 0;
5013 self.error_depth = 0;
5014 self.warning_depth = 0;
5015 self.information_depth = 0;
5016 }
5017 }
5018
5019 /// The current byte offset in the buffer.
5020 pub fn offset(&self) -> usize {
5021 self.range.start
5022 }
5023
5024 pub fn range(&self) -> Range<usize> {
5025 self.range.clone()
5026 }
5027
5028 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5029 let depth = match endpoint.severity {
5030 DiagnosticSeverity::ERROR => &mut self.error_depth,
5031 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5032 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5033 DiagnosticSeverity::HINT => &mut self.hint_depth,
5034 _ => return,
5035 };
5036 if endpoint.is_start {
5037 *depth += 1;
5038 } else {
5039 *depth -= 1;
5040 }
5041
5042 if endpoint.is_unnecessary {
5043 if endpoint.is_start {
5044 self.unnecessary_depth += 1;
5045 } else {
5046 self.unnecessary_depth -= 1;
5047 }
5048 }
5049 }
5050
5051 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5052 if self.error_depth > 0 {
5053 Some(DiagnosticSeverity::ERROR)
5054 } else if self.warning_depth > 0 {
5055 Some(DiagnosticSeverity::WARNING)
5056 } else if self.information_depth > 0 {
5057 Some(DiagnosticSeverity::INFORMATION)
5058 } else if self.hint_depth > 0 {
5059 Some(DiagnosticSeverity::HINT)
5060 } else {
5061 None
5062 }
5063 }
5064
5065 fn current_code_is_unnecessary(&self) -> bool {
5066 self.unnecessary_depth > 0
5067 }
5068}
5069
5070impl<'a> Iterator for BufferChunks<'a> {
5071 type Item = Chunk<'a>;
5072
5073 fn next(&mut self) -> Option<Self::Item> {
5074 let mut next_capture_start = usize::MAX;
5075 let mut next_diagnostic_endpoint = usize::MAX;
5076
5077 if let Some(highlights) = self.highlights.as_mut() {
5078 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5079 if *parent_capture_end <= self.range.start {
5080 highlights.stack.pop();
5081 } else {
5082 break;
5083 }
5084 }
5085
5086 if highlights.next_capture.is_none() {
5087 highlights.next_capture = highlights.captures.next();
5088 }
5089
5090 while let Some(capture) = highlights.next_capture.as_ref() {
5091 if self.range.start < capture.node.start_byte() {
5092 next_capture_start = capture.node.start_byte();
5093 break;
5094 } else {
5095 let highlight_id =
5096 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5097 highlights
5098 .stack
5099 .push((capture.node.end_byte(), highlight_id));
5100 highlights.next_capture = highlights.captures.next();
5101 }
5102 }
5103 }
5104
5105 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5106 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5107 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5108 if endpoint.offset <= self.range.start {
5109 self.update_diagnostic_depths(endpoint);
5110 diagnostic_endpoints.next();
5111 self.underline = endpoint.underline;
5112 } else {
5113 next_diagnostic_endpoint = endpoint.offset;
5114 break;
5115 }
5116 }
5117 }
5118 self.diagnostic_endpoints = diagnostic_endpoints;
5119
5120 if let Some(ChunkBitmaps {
5121 text: chunk,
5122 chars: chars_map,
5123 tabs,
5124 }) = self.chunks.peek_with_bitmaps()
5125 {
5126 let chunk_start = self.range.start;
5127 let mut chunk_end = (self.chunks.offset() + chunk.len())
5128 .min(next_capture_start)
5129 .min(next_diagnostic_endpoint);
5130 let mut highlight_id = None;
5131 if let Some(highlights) = self.highlights.as_ref()
5132 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5133 {
5134 chunk_end = chunk_end.min(*parent_capture_end);
5135 highlight_id = Some(*parent_highlight_id);
5136 }
5137 let bit_start = chunk_start - self.chunks.offset();
5138 let bit_end = chunk_end - self.chunks.offset();
5139
5140 let slice = &chunk[bit_start..bit_end];
5141
5142 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5143 let tabs = (tabs >> bit_start) & mask;
5144 let chars = (chars_map >> bit_start) & mask;
5145
5146 self.range.start = chunk_end;
5147 if self.range.start == self.chunks.offset() + chunk.len() {
5148 self.chunks.next().unwrap();
5149 }
5150
5151 Some(Chunk {
5152 text: slice,
5153 syntax_highlight_id: highlight_id,
5154 underline: self.underline,
5155 diagnostic_severity: self.current_diagnostic_severity(),
5156 is_unnecessary: self.current_code_is_unnecessary(),
5157 tabs,
5158 chars,
5159 ..Chunk::default()
5160 })
5161 } else {
5162 None
5163 }
5164 }
5165}
5166
5167impl operation_queue::Operation for Operation {
5168 fn lamport_timestamp(&self) -> clock::Lamport {
5169 match self {
5170 Operation::Buffer(_) => {
5171 unreachable!("buffer operations should never be deferred at this layer")
5172 }
5173 Operation::UpdateDiagnostics {
5174 lamport_timestamp, ..
5175 }
5176 | Operation::UpdateSelections {
5177 lamport_timestamp, ..
5178 }
5179 | Operation::UpdateCompletionTriggers {
5180 lamport_timestamp, ..
5181 }
5182 | Operation::UpdateLineEnding {
5183 lamport_timestamp, ..
5184 } => *lamport_timestamp,
5185 }
5186 }
5187}
5188
5189impl Default for Diagnostic {
5190 fn default() -> Self {
5191 Self {
5192 source: Default::default(),
5193 source_kind: DiagnosticSourceKind::Other,
5194 code: None,
5195 code_description: None,
5196 severity: DiagnosticSeverity::ERROR,
5197 message: Default::default(),
5198 markdown: None,
5199 group_id: 0,
5200 is_primary: false,
5201 is_disk_based: false,
5202 is_unnecessary: false,
5203 underline: true,
5204 data: None,
5205 }
5206 }
5207}
5208
5209impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5211 pub fn spaces(len: u32) -> Self {
5212 Self {
5213 len,
5214 kind: IndentKind::Space,
5215 }
5216 }
5217
5218 /// Returns an [`IndentSize`] representing a tab.
5219 pub fn tab() -> Self {
5220 Self {
5221 len: 1,
5222 kind: IndentKind::Tab,
5223 }
5224 }
5225
5226 /// An iterator over the characters represented by this [`IndentSize`].
5227 pub fn chars(&self) -> impl Iterator<Item = char> {
5228 iter::repeat(self.char()).take(self.len as usize)
5229 }
5230
5231 /// The character representation of this [`IndentSize`].
5232 pub fn char(&self) -> char {
5233 match self.kind {
5234 IndentKind::Space => ' ',
5235 IndentKind::Tab => '\t',
5236 }
5237 }
5238
5239 /// Consumes the current [`IndentSize`] and returns a new one that has
5240 /// been shrunk or enlarged by the given size along the given direction.
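    ///
    /// Illustrative sketch (not compiled):
    ///
    /// ```ignore
    /// // Outdent an 8-space indent by 4 spaces.
    /// let indent = IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(indent.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 4);
    ///
    /// // Grow a one-tab indent by another tab.
    /// let indent = IndentSize::tab().with_delta(Ordering::Greater, IndentSize::tab());
    /// assert_eq!(indent.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 8);
    /// ```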
5241 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5242 match direction {
5243 Ordering::Less => {
5244 if self.kind == size.kind && self.len >= size.len {
5245 self.len -= size.len;
5246 }
5247 }
5248 Ordering::Equal => {}
5249 Ordering::Greater => {
5250 if self.len == 0 {
5251 self = size;
5252 } else if self.kind == size.kind {
5253 self.len += size.len;
5254 }
5255 }
5256 }
5257 self
5258 }
5259
5260 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5261 match self.kind {
5262 IndentKind::Space => self.len as usize,
5263 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5264 }
5265 }
5266}
5267
5268#[cfg(any(test, feature = "test-support"))]
5269pub struct TestFile {
5270 pub path: Arc<RelPath>,
5271 pub root_name: String,
5272 pub local_root: Option<PathBuf>,
5273}
5274
5275#[cfg(any(test, feature = "test-support"))]
5276impl File for TestFile {
5277 fn path(&self) -> &Arc<RelPath> {
5278 &self.path
5279 }
5280
5281 fn full_path(&self, _: &gpui::App) -> PathBuf {
5282 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5283 }
5284
5285 fn as_local(&self) -> Option<&dyn LocalFile> {
5286 if self.local_root.is_some() {
5287 Some(self)
5288 } else {
5289 None
5290 }
5291 }
5292
5293 fn disk_state(&self) -> DiskState {
5294 unimplemented!()
5295 }
5296
5297 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5298 self.path().file_name().unwrap_or(self.root_name.as_ref())
5299 }
5300
5301 fn worktree_id(&self, _: &App) -> WorktreeId {
5302 WorktreeId::from_usize(0)
5303 }
5304
5305 fn to_proto(&self, _: &App) -> rpc::proto::File {
5306 unimplemented!()
5307 }
5308
5309 fn is_private(&self) -> bool {
5310 false
5311 }
5312
5313 fn path_style(&self, _cx: &App) -> PathStyle {
5314 PathStyle::local()
5315 }
5316}
5317
5318#[cfg(any(test, feature = "test-support"))]
5319impl LocalFile for TestFile {
5320 fn abs_path(&self, _cx: &App) -> PathBuf {
5321 PathBuf::from(self.local_root.as_ref().unwrap())
5322 .join(&self.root_name)
5323 .join(self.path.as_std_path())
5324 }
5325
5326 fn load(&self, _cx: &App) -> Task<Result<String>> {
5327 unimplemented!()
5328 }
5329
5330 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5331 unimplemented!()
5332 }
5333}
5334
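/// Groups a sequence of row numbers into contiguous ascending ranges, capping each
/// range at `max_len` rows.
///
/// Illustrative sketch (crate-internal; not compiled):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```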
5335pub(crate) fn contiguous_ranges(
5336 values: impl Iterator<Item = u32>,
5337 max_len: usize,
5338) -> impl Iterator<Item = Range<u32>> {
5339 let mut values = values;
5340 let mut current_range: Option<Range<u32>> = None;
5341 std::iter::from_fn(move || {
5342 loop {
5343 if let Some(value) = values.next() {
5344 if let Some(range) = &mut current_range
5345 && value == range.end
5346 && range.len() < max_len
5347 {
5348 range.end += 1;
5349 continue;
5350 }
5351
5352 let prev_range = current_range.clone();
5353 current_range = Some(value..(value + 1));
5354 if prev_range.is_some() {
5355 return prev_range;
5356 }
5357 } else {
5358 return current_range.take();
5359 }
5360 }
5361 })
5362}
5363
5364#[derive(Default, Debug)]
5365pub struct CharClassifier {
5366 scope: Option<LanguageScope>,
5367 scope_context: Option<CharScopeContext>,
5368 ignore_punctuation: bool,
5369}
5370
5371impl CharClassifier {
5372 pub fn new(scope: Option<LanguageScope>) -> Self {
5373 Self {
5374 scope,
5375 scope_context: None,
5376 ignore_punctuation: false,
5377 }
5378 }
5379
5380 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5381 Self {
5382 scope_context,
5383 ..self
5384 }
5385 }
5386
5387 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5388 Self {
5389 ignore_punctuation,
5390 ..self
5391 }
5392 }
5393
5394 pub fn is_whitespace(&self, c: char) -> bool {
5395 self.kind(c) == CharKind::Whitespace
5396 }
5397
5398 pub fn is_word(&self, c: char) -> bool {
5399 self.kind(c) == CharKind::Word
5400 }
5401
5402 pub fn is_punctuation(&self, c: char) -> bool {
5403 self.kind(c) == CharKind::Punctuation
5404 }
5405
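    /// Classifies `c`, treating punctuation as part of a word when `ignore_punctuation`
    /// is true. Alphanumerics, underscores, and any extra word characters defined by the
    /// language scope are classified as [`CharKind::Word`].
    ///
    /// Illustrative sketch (not compiled):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('a', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// assert_eq!(classifier.kind_with(' ', true), CharKind::Whitespace);
    /// ```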
5406 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5407 if c.is_alphanumeric() || c == '_' {
5408 return CharKind::Word;
5409 }
5410
5411 if let Some(scope) = &self.scope {
5412 let characters = match self.scope_context {
5413 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5414 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5415 None => scope.word_characters(),
5416 };
5417 if let Some(characters) = characters
5418 && characters.contains(&c)
5419 {
5420 return CharKind::Word;
5421 }
5422 }
5423
5424 if c.is_whitespace() {
5425 return CharKind::Whitespace;
5426 }
5427
5428 if ignore_punctuation {
5429 CharKind::Word
5430 } else {
5431 CharKind::Punctuation
5432 }
5433 }
5434
5435 pub fn kind(&self, c: char) -> CharKind {
5436 self.kind_with(c, self.ignore_punctuation)
5437 }
5438}
5439
5440/// Find all of the ranges of whitespace that occur at the ends of lines
5441/// in the given rope.
5442///
5443/// This could also be done with a regex search, but this implementation
5444/// avoids copying text.
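///
/// A minimal sketch (not compiled; assumes `Rope: From<&str>`, as used elsewhere in
/// this codebase):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    done\t\n}");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..13, 22..23]);
/// ```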
5445pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5446 let mut ranges = Vec::new();
5447
5448 let mut offset = 0;
5449 let mut prev_chunk_trailing_whitespace_range = 0..0;
5450 for chunk in rope.chunks() {
5451 let mut prev_line_trailing_whitespace_range = 0..0;
5452 for (i, line) in chunk.split('\n').enumerate() {
5453 let line_end_offset = offset + line.len();
5454 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5455 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5456
5457 if i == 0 && trimmed_line_len == 0 {
5458 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5459 }
5460 if !prev_line_trailing_whitespace_range.is_empty() {
5461 ranges.push(prev_line_trailing_whitespace_range);
5462 }
5463
5464 offset = line_end_offset + 1;
5465 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5466 }
5467
5468 offset -= 1;
5469 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5470 }
5471
5472 if !prev_chunk_trailing_whitespace_range.is_empty() {
5473 ranges.push(prev_chunk_trailing_whitespace_range);
5474 }
5475
5476 ranges
5477}