1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16};
17pub use crate::{
18 Grammar, Language, LanguageRegistry,
19 diagnostic_set::DiagnosticSet,
20 highlight_map::{HighlightId, HighlightMap},
21 proto,
22};
23use anyhow::{Context as _, Result};
24pub use clock::ReplicaId;
25use clock::{Global, Lamport};
26use collections::{HashMap, HashSet};
27use fs::MTime;
28use futures::channel::oneshot;
29use gpui::{
30 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
31 Task, TaskLabel, TextStyle,
32};
33
34use lsp::{LanguageServerId, NumberOrString};
35use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use settings::WorktreeId;
39use smallvec::SmallVec;
40use smol::future::yield_now;
41use std::{
42 any::Any,
43 borrow::Cow,
44 cell::Cell,
45 cmp::{self, Ordering, Reverse},
46 collections::{BTreeMap, BTreeSet},
47 future::Future,
48 iter::{self, Iterator, Peekable},
49 mem,
50 num::NonZeroU32,
51 ops::{Deref, Range},
52 path::PathBuf,
53 rc,
54 sync::{Arc, LazyLock},
55 time::{Duration, Instant},
56 vec,
57};
58use sum_tree::TreeMap;
59use text::operation_queue::OperationQueue;
60use text::*;
61pub use text::{
62 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
63 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
64 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
65 ToPointUtf16, Transaction, TransactionId, Unclipped,
66};
67use theme::{ActiveTheme as _, SyntaxTheme};
68#[cfg(any(test, feature = "test-support"))]
69use util::RandomCharIter;
70use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
71
72#[cfg(any(test, feature = "test-support"))]
73pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
74
75pub use lsp::DiagnosticSeverity;
76
77/// A label for the background task spawned by the buffer to compute
78/// a diff against the contents of its file.
79pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
80
81 /// Indicates whether a [`Buffer`] has permission to edit.
82#[derive(PartialEq, Clone, Copy, Debug)]
83pub enum Capability {
84 /// The buffer is a mutable replica.
85 ReadWrite,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90pub type BufferRow = u32;
91
92/// An in-memory representation of a source code file, including its text,
93/// syntax trees, git status, and diagnostics.
94pub struct Buffer {
95 text: TextBuffer,
96 branch_state: Option<BufferBranchState>,
97 /// Filesystem state, `None` when there is no path.
98 file: Option<Arc<dyn File>>,
99 /// The mtime of the file when this buffer was last loaded from
100 /// or saved to disk.
101 saved_mtime: Option<MTime>,
102 /// The version vector when this buffer was last loaded from
103 /// or saved to disk.
104 saved_version: clock::Global,
105 preview_version: clock::Global,
106 transaction_depth: usize,
107 was_dirty_before_starting_transaction: Option<bool>,
108 reload_task: Option<Task<Result<()>>>,
109 language: Option<Arc<Language>>,
110 autoindent_requests: Vec<Arc<AutoindentRequest>>,
111 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 reparse: Option<Task<()>>,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
127 /// Memoize calls to has_changes_since(saved_version).
128 /// The contents of the cell are (self.version, has_changes) as of the last call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 change_bits: Vec<rc::Weak<Cell<bool>>>,
131 _subscriptions: Vec<gpui::Subscription>,
132 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
133}
134
135#[derive(Debug, Clone)]
136pub struct TreeSitterData {
137 chunks: RowChunks,
138 brackets_by_chunks: Vec<Option<Vec<BracketMatch>>>,
139}
140
141const MAX_ROWS_IN_A_CHUNK: u32 = 50;
142
143impl TreeSitterData {
144 fn clear(&mut self) {
145 self.brackets_by_chunks = vec![None; self.chunks.len()];
146 }
147
148 fn new(snapshot: text::BufferSnapshot) -> Self {
149 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 Self {
151 brackets_by_chunks: vec![None; chunks.len()],
152 chunks,
153 }
154 }
155}
156
157#[derive(Copy, Clone, Debug, PartialEq, Eq)]
158pub enum ParseStatus {
159 Idle,
160 Parsing,
161}
162
163struct BufferBranchState {
164 base_buffer: Entity<Buffer>,
165 merged_operations: Vec<Lamport>,
166}
167
168/// An immutable, cheaply cloneable representation of a fixed
169/// state of a buffer.
170pub struct BufferSnapshot {
171 pub text: text::BufferSnapshot,
172 pub syntax: SyntaxSnapshot,
173 file: Option<Arc<dyn File>>,
174 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
175 remote_selections: TreeMap<ReplicaId, SelectionSet>,
176 language: Option<Arc<Language>>,
177 non_text_state_update_count: usize,
178 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
179}
180
181/// The kind and amount of indentation in a particular line. For now,
182/// assumes that indentation is all the same character.
183#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
184pub struct IndentSize {
185 /// The number of bytes that comprise the indentation.
186 pub len: u32,
187 /// The kind of whitespace used for indentation.
188 pub kind: IndentKind,
189}
190
191/// A whitespace character that's used for indentation.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub enum IndentKind {
194 /// An ASCII space character.
195 #[default]
196 Space,
197 /// An ASCII tab character.
198 Tab,
199}
200
201/// The shape of a selection cursor.
202#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
203pub enum CursorShape {
204 /// A vertical bar
205 #[default]
206 Bar,
207 /// A block that surrounds the following character
208 Block,
209 /// An underline that runs along the following character
210 Underline,
211 /// A box drawn around the following character
212 Hollow,
213}
214
215impl From<settings::CursorShape> for CursorShape {
216 fn from(shape: settings::CursorShape) -> Self {
217 match shape {
218 settings::CursorShape::Bar => CursorShape::Bar,
219 settings::CursorShape::Block => CursorShape::Block,
220 settings::CursorShape::Underline => CursorShape::Underline,
221 settings::CursorShape::Hollow => CursorShape::Hollow,
222 }
223 }
224}
225
226#[derive(Clone, Debug)]
227struct SelectionSet {
228 line_mode: bool,
229 cursor_shape: CursorShape,
230 selections: Arc<[Selection<Anchor>]>,
231 lamport_timestamp: clock::Lamport,
232}
233
234/// A diagnostic associated with a certain range of a buffer.
235#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
236pub struct Diagnostic {
237 /// The name of the service that produced this diagnostic.
238 pub source: Option<String>,
239 /// A machine-readable code that identifies this diagnostic.
240 pub code: Option<NumberOrString>,
241 pub code_description: Option<lsp::Uri>,
242 /// Whether this diagnostic is a hint, warning, or error.
243 pub severity: DiagnosticSeverity,
244 /// The human-readable message associated with this diagnostic.
245 pub message: String,
246 /// The human-readable message, in Markdown format.
247 pub markdown: Option<String>,
248 /// An id that identifies the group to which this diagnostic belongs.
249 ///
250 /// When a language server produces a diagnostic with
251 /// one or more associated diagnostics, those diagnostics are all
252 /// assigned a single group ID.
253 pub group_id: usize,
254 /// Whether this diagnostic is the primary diagnostic for its group.
255 ///
256 /// In a given group, the primary diagnostic is the top-level diagnostic
257 /// returned by the language server. The non-primary diagnostics are the
258 /// associated diagnostics.
259 pub is_primary: bool,
260 /// Whether this diagnostic is considered to originate from an analysis of
261 /// files on disk, as opposed to any unsaved buffer contents. This is a
262 /// property of a given diagnostic source, and is configured for a given
263 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
264 /// for the language server.
265 pub is_disk_based: bool,
266 /// Whether this diagnostic marks unnecessary code.
267 pub is_unnecessary: bool,
268 /// Allows quick separation of diagnostic groups based on their source.
269 pub source_kind: DiagnosticSourceKind,
270 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for it.
271 pub data: Option<Value>,
272 /// Whether to underline the corresponding text range in the editor.
273 pub underline: bool,
274}
275
276#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
277pub enum DiagnosticSourceKind {
278 Pulled,
279 Pushed,
280 Other,
281}
282
283/// An operation used to synchronize this buffer with its other replicas.
284#[derive(Clone, Debug, PartialEq)]
285pub enum Operation {
286 /// A text operation.
287 Buffer(text::Operation),
288
289 /// An update to the buffer's diagnostics.
290 UpdateDiagnostics {
291 /// The id of the language server that produced the new diagnostics.
292 server_id: LanguageServerId,
293 /// The diagnostics.
294 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
295 /// The buffer's lamport timestamp.
296 lamport_timestamp: clock::Lamport,
297 },
298
299 /// An update to the most recent selections in this buffer.
300 UpdateSelections {
301 /// The selections.
302 selections: Arc<[Selection<Anchor>]>,
303 /// The buffer's lamport timestamp.
304 lamport_timestamp: clock::Lamport,
305 /// Whether the selections are in 'line mode'.
306 line_mode: bool,
307 /// The [`CursorShape`] associated with these selections.
308 cursor_shape: CursorShape,
309 },
310
311 /// An update to the characters that should trigger autocompletion
312 /// for this buffer.
313 UpdateCompletionTriggers {
314 /// The characters that trigger autocompletion.
315 triggers: Vec<String>,
316 /// The buffer's lamport timestamp.
317 lamport_timestamp: clock::Lamport,
318 /// The language server ID.
319 server_id: LanguageServerId,
320 },
321
322 /// An update to the line ending type of this buffer.
323 UpdateLineEnding {
324 /// The line ending type.
325 line_ending: LineEnding,
326 /// The buffer's lamport timestamp.
327 lamport_timestamp: clock::Lamport,
328 },
329}
330
331/// An event that occurs in a buffer.
332#[derive(Clone, Debug, PartialEq)]
333pub enum BufferEvent {
334 /// The buffer was changed in a way that must be
335 /// propagated to its other replicas.
336 Operation {
337 operation: Operation,
338 is_local: bool,
339 },
340 /// The buffer was edited.
341 Edited,
342 /// The buffer's `dirty` bit changed.
343 DirtyChanged,
344 /// The buffer was saved.
345 Saved,
346 /// The buffer's file was changed on disk.
347 FileHandleChanged,
348 /// The buffer was reloaded.
349 Reloaded,
350 /// The buffer needs to be reloaded.
351 ReloadNeeded,
352 /// The buffer's language was changed.
353 LanguageChanged,
354 /// The buffer's syntax trees were updated.
355 Reparsed,
356 /// The buffer's diagnostics were updated.
357 DiagnosticsUpdated,
358 /// The buffer gained or lost editing capabilities.
359 CapabilityChanged,
360}
361
362/// The file associated with a buffer.
363pub trait File: Send + Sync + Any {
364 /// Returns the [`LocalFile`] associated with this file, if the
365 /// file is local.
366 fn as_local(&self) -> Option<&dyn LocalFile>;
367
368 /// Returns whether this file is local.
369 fn is_local(&self) -> bool {
370 self.as_local().is_some()
371 }
372
373 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
374 /// only available in some states, such as modification time.
375 fn disk_state(&self) -> DiskState;
376
377 /// Returns the path of this file relative to the worktree's root directory.
378 fn path(&self) -> &Arc<RelPath>;
379
380 /// Returns the path of this file relative to the worktree's parent directory (this means it
381 /// includes the name of the worktree's root folder).
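    /// A quick illustration (hypothetical worktree rooted at a folder named `zed`):
    /// ```ignore
    /// // path()      -> "src/main.rs"      (relative to the worktree root)
    /// // full_path() -> "zed/src/main.rs"  (includes the worktree's root folder)
    /// ```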
382 fn full_path(&self, cx: &App) -> PathBuf;
383
384 /// Returns the path style of this file.
385 fn path_style(&self, cx: &App) -> PathStyle;
386
387 /// Returns the last component of this handle's absolute path. If this handle refers to the root
388 /// of its worktree, then this method will return the name of the worktree itself.
389 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
390
391 /// Returns the id of the worktree to which this file belongs.
392 ///
393 /// This is needed for looking up project-specific settings.
394 fn worktree_id(&self, cx: &App) -> WorktreeId;
395
396 /// Converts this file into a protobuf message.
397 fn to_proto(&self, cx: &App) -> rpc::proto::File;
398
399 /// Returns whether Zed considers this to be a private file.
400 fn is_private(&self) -> bool;
401}
402
403 /// The file's storage status: whether it's stored (`Present`), and if so, when it was last
404/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
405/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
406/// indicator for new files.
407#[derive(Copy, Clone, Debug, PartialEq)]
408pub enum DiskState {
409 /// File created in Zed that has not been saved.
410 New,
411 /// File present on the filesystem.
412 Present { mtime: MTime },
413 /// Deleted file that was previously present.
414 Deleted,
415}
416
417impl DiskState {
418 /// Returns the file's last known modification time on disk.
419 pub fn mtime(self) -> Option<MTime> {
420 match self {
421 DiskState::New => None,
422 DiskState::Present { mtime } => Some(mtime),
423 DiskState::Deleted => None,
424 }
425 }
426
427 pub fn exists(&self) -> bool {
428 match self {
429 DiskState::New => false,
430 DiskState::Present { .. } => true,
431 DiskState::Deleted => false,
432 }
433 }
434}
435
436/// The file associated with a buffer, in the case where the file is on the local disk.
437pub trait LocalFile: File {
438 /// Returns the absolute path of this file.
439 fn abs_path(&self, cx: &App) -> PathBuf;
440
441 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
442 fn load(&self, cx: &App) -> Task<Result<String>>;
443
444 /// Loads the file's contents from disk as raw bytes.
445 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
446}
447
448/// The auto-indent behavior associated with an editing operation.
449/// For some editing operations, each affected line of text has its
450/// indentation recomputed. For other operations, the entire block
451/// of edited text is adjusted uniformly.
452#[derive(Clone, Debug)]
453pub enum AutoindentMode {
454 /// Indent each line of inserted text.
455 EachLine,
456 /// Apply the same indentation adjustment to all of the lines
457 /// in a given insertion.
458 Block {
459 /// The original indentation column of the first line of each
460 /// insertion, if it has been copied.
461 ///
462 /// Knowing this makes it possible to preserve the relative indentation
463 /// of every line in the insertion from when it was copied.
464 ///
465 /// If the original indent column is `a` and the first line of the insertion
466 /// is auto-indented to column `b`, then every other line of the insertion
467 /// is shifted by that same difference, `b - a`.
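    /// A small worked example (illustrative values, with hypothetical
    /// `insert_at` / `copied_text` bindings): if the copied block's first line
    /// originally sat at column 4 (`a = 4`) and auto-indent places it at
    /// column 8 (`b = 8`), every line of the insertion is shifted right by
    /// `b - a = 4` columns, preserving the block's internal structure.
    /// ```ignore
    /// buffer.edit(
    ///     [(insert_at..insert_at, copied_text)],
    ///     Some(AutoindentMode::Block {
    ///         original_indent_columns: vec![Some(4)],
    ///     }),
    ///     cx,
    /// );
    /// ```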
468 original_indent_columns: Vec<Option<u32>>,
469 },
470}
471
472#[derive(Clone)]
473struct AutoindentRequest {
474 before_edit: BufferSnapshot,
475 entries: Vec<AutoindentRequestEntry>,
476 is_block_mode: bool,
477 ignore_empty_lines: bool,
478}
479
480#[derive(Debug, Clone)]
481struct AutoindentRequestEntry {
482 /// A range of the buffer whose indentation should be adjusted.
483 range: Range<Anchor>,
484 /// Whether or not these lines should be considered brand new, for the
485 /// purpose of auto-indent. When text is not new, its indentation will
486 /// only be adjusted if the suggested indentation level has *changed*
487 /// since the edit was made.
488 first_line_is_new: bool,
489 indent_size: IndentSize,
490 original_indent_column: Option<u32>,
491}
492
493#[derive(Debug)]
494struct IndentSuggestion {
495 basis_row: u32,
496 delta: Ordering,
497 within_error: bool,
498}
499
500struct BufferChunkHighlights<'a> {
501 captures: SyntaxMapCaptures<'a>,
502 next_capture: Option<SyntaxMapCapture<'a>>,
503 stack: Vec<(usize, HighlightId)>,
504 highlight_maps: Vec<HighlightMap>,
505}
506
507/// An iterator that yields chunks of a buffer's text, along with their
508/// syntax highlights and diagnostic status.
509pub struct BufferChunks<'a> {
510 buffer_snapshot: Option<&'a BufferSnapshot>,
511 range: Range<usize>,
512 chunks: text::Chunks<'a>,
513 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
514 error_depth: usize,
515 warning_depth: usize,
516 information_depth: usize,
517 hint_depth: usize,
518 unnecessary_depth: usize,
519 underline: bool,
520 highlights: Option<BufferChunkHighlights<'a>>,
521}
522
523/// A chunk of a buffer's text, along with its syntax highlight and
524/// diagnostic status.
525#[derive(Clone, Debug, Default)]
526pub struct Chunk<'a> {
527 /// The text of the chunk.
528 pub text: &'a str,
529 /// The syntax highlighting style of the chunk.
530 pub syntax_highlight_id: Option<HighlightId>,
531 /// The highlight style that has been applied to this chunk in
532 /// the editor.
533 pub highlight_style: Option<HighlightStyle>,
534 /// The severity of diagnostic associated with this chunk, if any.
535 pub diagnostic_severity: Option<DiagnosticSeverity>,
536 /// A bitset of which characters are tabs in this string.
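    /// A minimal sketch of reading this bitset, assuming bit `i` corresponds to
    /// position `i` within `text` (an illustrative assumption about the encoding,
    /// not a guarantee):
    /// ```ignore
    /// let i = 3;
    /// let third_position_is_tab = (chunk.tabs >> i) & 1 == 1;
    /// ```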
537 pub tabs: u128,
538 /// A bitmap of the character indices in this chunk.
539 pub chars: u128,
540 /// Whether this chunk of text is marked as unnecessary.
541 pub is_unnecessary: bool,
542 /// Whether this chunk of text was originally a tab character.
543 pub is_tab: bool,
544 /// Whether this chunk of text was originally an inlay.
545 pub is_inlay: bool,
546 /// Whether to underline the corresponding text range in the editor.
547 pub underline: bool,
548}
549
550/// A set of edits to a given version of a buffer, computed asynchronously.
551#[derive(Debug)]
552pub struct Diff {
553 pub base_version: clock::Global,
554 pub line_ending: LineEnding,
555 pub edits: Vec<(Range<usize>, Arc<str>)>,
556}
557
558#[derive(Debug, Clone, Copy)]
559pub(crate) struct DiagnosticEndpoint {
560 offset: usize,
561 is_start: bool,
562 underline: bool,
563 severity: DiagnosticSeverity,
564 is_unnecessary: bool,
565}
566
567/// A class of characters, used for characterizing a run of text.
568#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
569pub enum CharKind {
570 /// Whitespace.
571 Whitespace,
572 /// Punctuation.
573 Punctuation,
574 /// Word.
575 Word,
576}
577
578/// Context for character classification within a specific scope.
579#[derive(Copy, Clone, Eq, PartialEq, Debug)]
580pub enum CharScopeContext {
581 /// Character classification for completion queries.
582 ///
583 /// This context treats certain characters as word constituents that would
584 /// normally be considered punctuation, such as '-' in Tailwind classes
585 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
586 Completion,
587 /// Character classification for linked edits.
588 ///
589 /// This context handles characters that should be treated as part of
590 /// identifiers during linked editing operations, such as '.' in JSX
591 /// component names like `<Animated.View>`.
592 LinkedEdit,
593}
594
595 /// A runnable is a set of data about a region that can be resolved into a task.
596pub struct Runnable {
597 pub tags: SmallVec<[RunnableTag; 1]>,
598 pub language: Arc<Language>,
599 pub buffer: BufferId,
600}
601
602#[derive(Default, Clone, Debug)]
603pub struct HighlightedText {
604 pub text: SharedString,
605 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
606}
607
608#[derive(Default, Debug)]
609struct HighlightedTextBuilder {
610 pub text: String,
611 highlights: Vec<(Range<usize>, HighlightStyle)>,
612}
613
614impl HighlightedText {
615 pub fn from_buffer_range<T: ToOffset>(
616 range: Range<T>,
617 snapshot: &text::BufferSnapshot,
618 syntax_snapshot: &SyntaxSnapshot,
619 override_style: Option<HighlightStyle>,
620 syntax_theme: &SyntaxTheme,
621 ) -> Self {
622 let mut highlighted_text = HighlightedTextBuilder::default();
623 highlighted_text.add_text_from_buffer_range(
624 range,
625 snapshot,
626 syntax_snapshot,
627 override_style,
628 syntax_theme,
629 );
630 highlighted_text.build()
631 }
632
633 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
634 gpui::StyledText::new(self.text.clone())
635 .with_default_highlights(default_style, self.highlights.iter().cloned())
636 }
637
638 /// Returns the first line, with leading whitespace trimmed unless a highlight
639 /// starts within it, along with a boolean indicating whether more lines follow.
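    /// A minimal illustration (not a doctest; construction elided):
    /// ```ignore
    /// // For text "    foo\nbar" with no highlights:
    /// let (preview, has_more) = highlighted_text.first_line_preview();
    /// // preview.text == "foo", has_more == true
    /// ```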
640 pub fn first_line_preview(self) -> (Self, bool) {
641 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
642 let first_line = &self.text[..newline_ix];
643
644 // Trim leading whitespace, unless an edit starts prior to it.
645 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
646 if let Some((first_highlight_range, _)) = self.highlights.first() {
647 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
648 }
649
650 let preview_text = &first_line[preview_start_ix..];
651 let preview_highlights = self
652 .highlights
653 .into_iter()
654 .skip_while(|(range, _)| range.end <= preview_start_ix)
655 .take_while(|(range, _)| range.start < newline_ix)
656 .filter_map(|(mut range, highlight)| {
657 range.start = range.start.saturating_sub(preview_start_ix);
658 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
659 if range.is_empty() {
660 None
661 } else {
662 Some((range, highlight))
663 }
664 });
665
666 let preview = Self {
667 text: SharedString::new(preview_text),
668 highlights: preview_highlights.collect(),
669 };
670
671 (preview, self.text.len() > newline_ix)
672 }
673}
674
675impl HighlightedTextBuilder {
676 pub fn build(self) -> HighlightedText {
677 HighlightedText {
678 text: self.text.into(),
679 highlights: self.highlights,
680 }
681 }
682
683 pub fn add_text_from_buffer_range<T: ToOffset>(
684 &mut self,
685 range: Range<T>,
686 snapshot: &text::BufferSnapshot,
687 syntax_snapshot: &SyntaxSnapshot,
688 override_style: Option<HighlightStyle>,
689 syntax_theme: &SyntaxTheme,
690 ) {
691 let range = range.to_offset(snapshot);
692 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
693 let start = self.text.len();
694 self.text.push_str(chunk.text);
695 let end = self.text.len();
696
697 if let Some(highlight_style) = chunk
698 .syntax_highlight_id
699 .and_then(|id| id.style(syntax_theme))
700 {
701 let highlight_style = override_style.map_or(highlight_style, |override_style| {
702 highlight_style.highlight(override_style)
703 });
704 self.highlights.push((start..end, highlight_style));
705 } else if let Some(override_style) = override_style {
706 self.highlights.push((start..end, override_style));
707 }
708 }
709 }
710
711 fn highlighted_chunks<'a>(
712 range: Range<usize>,
713 snapshot: &'a text::BufferSnapshot,
714 syntax_snapshot: &'a SyntaxSnapshot,
715 ) -> BufferChunks<'a> {
716 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
717 grammar
718 .highlights_config
719 .as_ref()
720 .map(|config| &config.query)
721 });
722
723 let highlight_maps = captures
724 .grammars()
725 .iter()
726 .map(|grammar| grammar.highlight_map())
727 .collect();
728
729 BufferChunks::new(
730 snapshot.as_rope(),
731 range,
732 Some((captures, highlight_maps)),
733 false,
734 None,
735 )
736 }
737}
738
739#[derive(Clone)]
740pub struct EditPreview {
741 old_snapshot: text::BufferSnapshot,
742 applied_edits_snapshot: text::BufferSnapshot,
743 syntax_snapshot: SyntaxSnapshot,
744}
745
746impl EditPreview {
747 pub fn highlight_edits(
748 &self,
749 current_snapshot: &BufferSnapshot,
750 edits: &[(Range<Anchor>, impl AsRef<str>)],
751 include_deletions: bool,
752 cx: &App,
753 ) -> HighlightedText {
754 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
755 return HighlightedText::default();
756 };
757
758 let mut highlighted_text = HighlightedTextBuilder::default();
759
760 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
761
762 let insertion_highlight_style = HighlightStyle {
763 background_color: Some(cx.theme().status().created_background),
764 ..Default::default()
765 };
766 let deletion_highlight_style = HighlightStyle {
767 background_color: Some(cx.theme().status().deleted_background),
768 ..Default::default()
769 };
770 let syntax_theme = cx.theme().syntax();
771
772 for (range, edit_text) in edits {
773 let edit_new_end_in_preview_snapshot = range
774 .end
775 .bias_right(&self.old_snapshot)
776 .to_offset(&self.applied_edits_snapshot);
777 let edit_start_in_preview_snapshot =
778 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
779
780 let unchanged_range_in_preview_snapshot =
781 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
782 if !unchanged_range_in_preview_snapshot.is_empty() {
783 highlighted_text.add_text_from_buffer_range(
784 unchanged_range_in_preview_snapshot,
785 &self.applied_edits_snapshot,
786 &self.syntax_snapshot,
787 None,
788 syntax_theme,
789 );
790 }
791
792 let range_in_current_snapshot = range.to_offset(current_snapshot);
793 if include_deletions && !range_in_current_snapshot.is_empty() {
794 highlighted_text.add_text_from_buffer_range(
795 range_in_current_snapshot,
796 &current_snapshot.text,
797 &current_snapshot.syntax,
798 Some(deletion_highlight_style),
799 syntax_theme,
800 );
801 }
802
803 if !edit_text.as_ref().is_empty() {
804 highlighted_text.add_text_from_buffer_range(
805 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
806 &self.applied_edits_snapshot,
807 &self.syntax_snapshot,
808 Some(insertion_highlight_style),
809 syntax_theme,
810 );
811 }
812
813 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
814 }
815
816 highlighted_text.add_text_from_buffer_range(
817 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
818 &self.applied_edits_snapshot,
819 &self.syntax_snapshot,
820 None,
821 syntax_theme,
822 );
823
824 highlighted_text.build()
825 }
826
827 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
828 let (first, _) = edits.first()?;
829 let (last, _) = edits.last()?;
830
831 let start = first
832 .start
833 .bias_left(&self.old_snapshot)
834 .to_point(&self.applied_edits_snapshot);
835 let end = last
836 .end
837 .bias_right(&self.old_snapshot)
838 .to_point(&self.applied_edits_snapshot);
839
840 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
841 let range = Point::new(start.row, 0)
842 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
843
844 Some(range.to_offset(&self.applied_edits_snapshot))
845 }
846}
847
848#[derive(Clone, Debug, PartialEq, Eq)]
849pub struct BracketMatch {
850 pub open_range: Range<usize>,
851 pub close_range: Range<usize>,
852 pub newline_only: bool,
853 pub depth: usize,
854}
855
856impl BracketMatch {
857 pub fn bracket_ranges(self) -> (Range<usize>, Range<usize>) {
858 (self.open_range, self.close_range)
859 }
860}
861
862impl Buffer {
863 /// Create a new buffer with the given base text.
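    /// A typical construction, sketched under the assumption that `cx` is a GPUI
    /// app or test context:
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```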
864 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
865 Self::build(
866 TextBuffer::new(
867 ReplicaId::LOCAL,
868 cx.entity_id().as_non_zero_u64().into(),
869 base_text.into(),
870 ),
871 None,
872 Capability::ReadWrite,
873 )
874 }
875
876 /// Create a new buffer with the given base text, which must already be normalized (line endings, etc.).
877 pub fn local_normalized(
878 base_text_normalized: Rope,
879 line_ending: LineEnding,
880 cx: &Context<Self>,
881 ) -> Self {
882 Self::build(
883 TextBuffer::new_normalized(
884 ReplicaId::LOCAL,
885 cx.entity_id().as_non_zero_u64().into(),
886 line_ending,
887 base_text_normalized,
888 ),
889 None,
890 Capability::ReadWrite,
891 )
892 }
893
894 /// Create a new buffer that is a replica of a remote buffer.
895 pub fn remote(
896 remote_id: BufferId,
897 replica_id: ReplicaId,
898 capability: Capability,
899 base_text: impl Into<String>,
900 ) -> Self {
901 Self::build(
902 TextBuffer::new(replica_id, remote_id, base_text.into()),
903 None,
904 capability,
905 )
906 }
907
908 /// Create a new buffer that is a replica of a remote buffer, populating its
909 /// state from the given protobuf message.
910 pub fn from_proto(
911 replica_id: ReplicaId,
912 capability: Capability,
913 message: proto::BufferState,
914 file: Option<Arc<dyn File>>,
915 ) -> Result<Self> {
916 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
917 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
918 let mut this = Self::build(buffer, file, capability);
919 this.text.set_line_ending(proto::deserialize_line_ending(
920 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
921 ));
922 this.saved_version = proto::deserialize_version(&message.saved_version);
923 this.saved_mtime = message.saved_mtime.map(|time| time.into());
924 Ok(this)
925 }
926
927 /// Serialize the buffer's state to a protobuf message.
928 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
929 proto::BufferState {
930 id: self.remote_id().into(),
931 file: self.file.as_ref().map(|f| f.to_proto(cx)),
932 base_text: self.base_text().to_string(),
933 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
934 saved_version: proto::serialize_version(&self.saved_version),
935 saved_mtime: self.saved_mtime.map(|time| time.into()),
936 }
937 }
938
939 /// Serialize as protobufs all of the changes to the buffer since the given version.
940 pub fn serialize_ops(
941 &self,
942 since: Option<clock::Global>,
943 cx: &App,
944 ) -> Task<Vec<proto::Operation>> {
945 let mut operations = Vec::new();
946 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
947
948 operations.extend(self.remote_selections.iter().map(|(_, set)| {
949 proto::serialize_operation(&Operation::UpdateSelections {
950 selections: set.selections.clone(),
951 lamport_timestamp: set.lamport_timestamp,
952 line_mode: set.line_mode,
953 cursor_shape: set.cursor_shape,
954 })
955 }));
956
957 for (server_id, diagnostics) in &self.diagnostics {
958 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
959 lamport_timestamp: self.diagnostics_timestamp,
960 server_id: *server_id,
961 diagnostics: diagnostics.iter().cloned().collect(),
962 }));
963 }
964
965 for (server_id, completions) in &self.completion_triggers_per_language_server {
966 operations.push(proto::serialize_operation(
967 &Operation::UpdateCompletionTriggers {
968 triggers: completions.iter().cloned().collect(),
969 lamport_timestamp: self.completion_triggers_timestamp,
970 server_id: *server_id,
971 },
972 ));
973 }
974
975 let text_operations = self.text.operations().clone();
976 cx.background_spawn(async move {
977 let since = since.unwrap_or_default();
978 operations.extend(
979 text_operations
980 .iter()
981 .filter(|(_, op)| !since.observed(op.timestamp()))
982 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
983 );
984 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
985 operations
986 })
987 }
988
989 /// Assign a language to the buffer, returning the buffer.
990 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
991 self.set_language(Some(language), cx);
992 self
993 }
994
995 /// Returns the [`Capability`] of this buffer.
996 pub fn capability(&self) -> Capability {
997 self.capability
998 }
999
1000 /// Whether this buffer can only be read.
1001 pub fn read_only(&self) -> bool {
1002 self.capability == Capability::ReadOnly
1003 }
1004
1005 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1006 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1007 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1008 let snapshot = buffer.snapshot();
1009 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1010 let tree_sitter_data = TreeSitterData::new(snapshot);
1011 Self {
1012 saved_mtime,
1013 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1014 saved_version: buffer.version(),
1015 preview_version: buffer.version(),
1016 reload_task: None,
1017 transaction_depth: 0,
1018 was_dirty_before_starting_transaction: None,
1019 has_unsaved_edits: Cell::new((buffer.version(), false)),
1020 text: buffer,
1021 branch_state: None,
1022 file,
1023 capability,
1024 syntax_map,
1025 reparse: None,
1026 non_text_state_update_count: 0,
1027 sync_parse_timeout: Duration::from_millis(1),
1028 parse_status: watch::channel(ParseStatus::Idle),
1029 autoindent_requests: Default::default(),
1030 wait_for_autoindent_txs: Default::default(),
1031 pending_autoindent: Default::default(),
1032 language: None,
1033 remote_selections: Default::default(),
1034 diagnostics: Default::default(),
1035 diagnostics_timestamp: Lamport::MIN,
1036 completion_triggers: Default::default(),
1037 completion_triggers_per_language_server: Default::default(),
1038 completion_triggers_timestamp: Lamport::MIN,
1039 deferred_ops: OperationQueue::new(),
1040 has_conflict: false,
1041 change_bits: Default::default(),
1042 _subscriptions: Vec::new(),
1043 }
1044 }
1045
1046 pub fn build_snapshot(
1047 text: Rope,
1048 language: Option<Arc<Language>>,
1049 language_registry: Option<Arc<LanguageRegistry>>,
1050 cx: &mut App,
1051 ) -> impl Future<Output = BufferSnapshot> + use<> {
1052 let entity_id = cx.reserve_entity::<Self>().entity_id();
1053 let buffer_id = entity_id.as_non_zero_u64().into();
1054 async move {
1055 let text =
1056 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1057 .snapshot();
1058 let mut syntax = SyntaxMap::new(&text).snapshot();
1059 if let Some(language) = language.clone() {
1060 let language_registry = language_registry.clone();
1061 syntax.reparse(&text, language_registry, language);
1062 }
1063 let tree_sitter_data = TreeSitterData::new(text.clone());
1064 BufferSnapshot {
1065 text,
1066 syntax,
1067 file: None,
1068 diagnostics: Default::default(),
1069 remote_selections: Default::default(),
1070 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1071 language,
1072 non_text_state_update_count: 0,
1073 }
1074 }
1075 }
1076
1077 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1078 let entity_id = cx.reserve_entity::<Self>().entity_id();
1079 let buffer_id = entity_id.as_non_zero_u64().into();
1080 let text = TextBuffer::new_normalized(
1081 ReplicaId::LOCAL,
1082 buffer_id,
1083 Default::default(),
1084 Rope::new(),
1085 )
1086 .snapshot();
1087 let syntax = SyntaxMap::new(&text).snapshot();
1088 let tree_sitter_data = TreeSitterData::new(text.clone());
1089 BufferSnapshot {
1090 text,
1091 syntax,
1092 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1093 file: None,
1094 diagnostics: Default::default(),
1095 remote_selections: Default::default(),
1096 language: None,
1097 non_text_state_update_count: 0,
1098 }
1099 }
1100
1101 #[cfg(any(test, feature = "test-support"))]
1102 pub fn build_snapshot_sync(
1103 text: Rope,
1104 language: Option<Arc<Language>>,
1105 language_registry: Option<Arc<LanguageRegistry>>,
1106 cx: &mut App,
1107 ) -> BufferSnapshot {
1108 let entity_id = cx.reserve_entity::<Self>().entity_id();
1109 let buffer_id = entity_id.as_non_zero_u64().into();
1110 let text =
1111 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1112 .snapshot();
1113 let mut syntax = SyntaxMap::new(&text).snapshot();
1114 if let Some(language) = language.clone() {
1115 syntax.reparse(&text, language_registry, language);
1116 }
1117 let tree_sitter_data = TreeSitterData::new(text.clone());
1118 BufferSnapshot {
1119 text,
1120 syntax,
1121 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1122 file: None,
1123 diagnostics: Default::default(),
1124 remote_selections: Default::default(),
1125 language,
1126 non_text_state_update_count: 0,
1127 }
1128 }
1129
1130 /// Retrieve a snapshot of the buffer's current state. This is computationally
1131 /// cheap, and allows reading from the buffer on a background thread.
1132 pub fn snapshot(&self) -> BufferSnapshot {
1133 let text = self.text.snapshot();
1134 let mut syntax_map = self.syntax_map.lock();
1135 syntax_map.interpolate(&text);
1136 let syntax = syntax_map.snapshot();
1137
1138 BufferSnapshot {
1139 text,
1140 syntax,
1141 tree_sitter_data: self.tree_sitter_data.clone(),
1142 file: self.file.clone(),
1143 remote_selections: self.remote_selections.clone(),
1144 diagnostics: self.diagnostics.clone(),
1145 language: self.language.clone(),
1146 non_text_state_update_count: self.non_text_state_update_count,
1147 }
1148 }
1149
1150 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1151 let this = cx.entity();
1152 cx.new(|cx| {
1153 let mut branch = Self {
1154 branch_state: Some(BufferBranchState {
1155 base_buffer: this.clone(),
1156 merged_operations: Default::default(),
1157 }),
1158 language: self.language.clone(),
1159 has_conflict: self.has_conflict,
1160 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1161 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1162 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1163 };
1164 if let Some(language_registry) = self.language_registry() {
1165 branch.set_language_registry(language_registry);
1166 }
1167
1168 // Reparse the branch buffer so that we get syntax highlighting immediately.
1169 branch.reparse(cx);
1170
1171 branch
1172 })
1173 }
1174
1175 pub fn preview_edits(
1176 &self,
1177 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1178 cx: &App,
1179 ) -> Task<EditPreview> {
1180 let registry = self.language_registry();
1181 let language = self.language().cloned();
1182 let old_snapshot = self.text.snapshot();
1183 let mut branch_buffer = self.text.branch();
1184 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1185 cx.background_spawn(async move {
1186 if !edits.is_empty() {
1187 if let Some(language) = language.clone() {
1188 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1189 }
1190
1191 branch_buffer.edit(edits.iter().cloned());
1192 let snapshot = branch_buffer.snapshot();
1193 syntax_snapshot.interpolate(&snapshot);
1194
1195 if let Some(language) = language {
1196 syntax_snapshot.reparse(&snapshot, registry, language);
1197 }
1198 }
1199 EditPreview {
1200 old_snapshot,
1201 applied_edits_snapshot: branch_buffer.snapshot(),
1202 syntax_snapshot,
1203 }
1204 })
1205 }
1206
1207 /// Applies all of the changes in this buffer that intersect any of the
1208 /// given `ranges` to its base buffer.
1209 ///
1210 /// If `ranges` is empty, then all changes will be applied. This buffer must
1211 /// be a branch buffer to call this method.
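    /// A usage sketch (hypothetical edit contents):
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty `ranges` vector merges every change back into the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```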
1212 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1213 let Some(base_buffer) = self.base_buffer() else {
1214 debug_panic!("not a branch buffer");
1215 return;
1216 };
1217
1218 let mut ranges = if ranges.is_empty() {
1219 &[0..usize::MAX]
1220 } else {
1221 ranges.as_slice()
1222 }
1223 .iter()
1224 .peekable();
1225
1226 let mut edits = Vec::new();
1227 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1228 let mut is_included = false;
1229 while let Some(range) = ranges.peek() {
1230 if range.end < edit.new.start {
1231 ranges.next().unwrap();
1232 } else {
1233 if range.start <= edit.new.end {
1234 is_included = true;
1235 }
1236 break;
1237 }
1238 }
1239
1240 if is_included {
1241 edits.push((
1242 edit.old.clone(),
1243 self.text_for_range(edit.new.clone()).collect::<String>(),
1244 ));
1245 }
1246 }
1247
1248 let operation = base_buffer.update(cx, |base_buffer, cx| {
1249 // cx.emit(BufferEvent::DiffBaseChanged);
1250 base_buffer.edit(edits, None, cx)
1251 });
1252
1253 if let Some(operation) = operation
1254 && let Some(BufferBranchState {
1255 merged_operations, ..
1256 }) = &mut self.branch_state
1257 {
1258 merged_operations.push(operation);
1259 }
1260 }
1261
1262 fn on_base_buffer_event(
1263 &mut self,
1264 _: Entity<Buffer>,
1265 event: &BufferEvent,
1266 cx: &mut Context<Self>,
1267 ) {
1268 let BufferEvent::Operation { operation, .. } = event else {
1269 return;
1270 };
1271 let Some(BufferBranchState {
1272 merged_operations, ..
1273 }) = &mut self.branch_state
1274 else {
1275 return;
1276 };
1277
1278 let mut operation_to_undo = None;
1279 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1280 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1281 {
1282 merged_operations.remove(ix);
1283 operation_to_undo = Some(operation.timestamp);
1284 }
1285
1286 self.apply_ops([operation.clone()], cx);
1287
1288 if let Some(timestamp) = operation_to_undo {
1289 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1290 self.undo_operations(counts, cx);
1291 }
1292 }
1293
1294 #[cfg(test)]
1295 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1296 &self.text
1297 }
1298
1299 /// Retrieve a snapshot of the buffer's raw text, without any
1300 /// language-related state like the syntax tree or diagnostics.
1301 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1302 self.text.snapshot()
1303 }
1304
1305 /// The file associated with the buffer, if any.
1306 pub fn file(&self) -> Option<&Arc<dyn File>> {
1307 self.file.as_ref()
1308 }
1309
1310 /// The version of the buffer that was last saved or reloaded from disk.
1311 pub fn saved_version(&self) -> &clock::Global {
1312 &self.saved_version
1313 }
1314
1315 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1316 pub fn saved_mtime(&self) -> Option<MTime> {
1317 self.saved_mtime
1318 }
1319
1320 /// Assign a language to the buffer.
1321 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1322 self.non_text_state_update_count += 1;
1323 self.syntax_map.lock().clear(&self.text);
1324 self.language = language;
1325 self.was_changed();
1326 self.reparse(cx);
1327 cx.emit(BufferEvent::LanguageChanged);
1328 }
1329
1330 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1331 /// other languages if parts of the buffer are written in different languages.
1332 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1333 self.syntax_map
1334 .lock()
1335 .set_language_registry(language_registry);
1336 }
1337
1338 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1339 self.syntax_map.lock().language_registry()
1340 }
1341
1342 /// Assign the line ending type to the buffer.
1343 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1344 self.text.set_line_ending(line_ending);
1345
1346 let lamport_timestamp = self.text.lamport_clock.tick();
1347 self.send_operation(
1348 Operation::UpdateLineEnding {
1349 line_ending,
1350 lamport_timestamp,
1351 },
1352 true,
1353 cx,
1354 );
1355 }
1356
1357 /// Assign the buffer a new [`Capability`].
1358 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1359 if self.capability != capability {
1360 self.capability = capability;
1361 cx.emit(BufferEvent::CapabilityChanged)
1362 }
1363 }
1364
1365 /// This method is called to signal that the buffer has been saved.
1366 pub fn did_save(
1367 &mut self,
1368 version: clock::Global,
1369 mtime: Option<MTime>,
1370 cx: &mut Context<Self>,
1371 ) {
1372 self.saved_version = version.clone();
1373 self.has_unsaved_edits.set((version, false));
1374 self.has_conflict = false;
1375 self.saved_mtime = mtime;
1376 self.was_changed();
1377 cx.emit(BufferEvent::Saved);
1378 cx.notify();
1379 }
1380
1381 /// Reloads the contents of the buffer from disk.
1382 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1383 let (tx, rx) = futures::channel::oneshot::channel();
1384 let prev_version = self.text.version();
1385 self.reload_task = Some(cx.spawn(async move |this, cx| {
1386 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1387 let file = this.file.as_ref()?.as_local()?;
1388
1389 Some((file.disk_state().mtime(), file.load(cx)))
1390 })?
1391 else {
1392 return Ok(());
1393 };
1394
1395 let new_text = new_text.await?;
1396 let diff = this
1397 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1398 .await;
1399 this.update(cx, |this, cx| {
1400 if this.version() == diff.base_version {
1401 this.finalize_last_transaction();
1402 this.apply_diff(diff, cx);
1403 tx.send(this.finalize_last_transaction().cloned()).ok();
1404 this.has_conflict = false;
1405 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1406 } else {
1407 if !diff.edits.is_empty()
1408 || this
1409 .edits_since::<usize>(&diff.base_version)
1410 .next()
1411 .is_some()
1412 {
1413 this.has_conflict = true;
1414 }
1415
1416 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1417 }
1418
1419 this.reload_task.take();
1420 })
1421 }));
1422 rx
1423 }
1424
1425 /// This method is called to signal that the buffer has been reloaded.
1426 pub fn did_reload(
1427 &mut self,
1428 version: clock::Global,
1429 line_ending: LineEnding,
1430 mtime: Option<MTime>,
1431 cx: &mut Context<Self>,
1432 ) {
1433 self.saved_version = version;
1434 self.has_unsaved_edits
1435 .set((self.saved_version.clone(), false));
1436 self.text.set_line_ending(line_ending);
1437 self.saved_mtime = mtime;
1438 cx.emit(BufferEvent::Reloaded);
1439 cx.notify();
1440 }
1441
1442 /// Updates the [`File`] backing this buffer. This should be called when
1443 /// the file has changed or has been deleted.
1444 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1445 let was_dirty = self.is_dirty();
1446 let mut file_changed = false;
1447
1448 if let Some(old_file) = self.file.as_ref() {
1449 if new_file.path() != old_file.path() {
1450 file_changed = true;
1451 }
1452
1453 let old_state = old_file.disk_state();
1454 let new_state = new_file.disk_state();
1455 if old_state != new_state {
1456 file_changed = true;
1457 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1458 cx.emit(BufferEvent::ReloadNeeded)
1459 }
1460 }
1461 } else {
1462 file_changed = true;
1463 };
1464
1465 self.file = Some(new_file);
1466 if file_changed {
1467 self.was_changed();
1468 self.non_text_state_update_count += 1;
1469 if was_dirty != self.is_dirty() {
1470 cx.emit(BufferEvent::DirtyChanged);
1471 }
1472 cx.emit(BufferEvent::FileHandleChanged);
1473 cx.notify();
1474 }
1475 }
1476
1477 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1478 Some(self.branch_state.as_ref()?.base_buffer.clone())
1479 }
1480
1481 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1482 pub fn language(&self) -> Option<&Arc<Language>> {
1483 self.language.as_ref()
1484 }
1485
1486 /// Returns the [`Language`] at the given location.
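    /// A sketch of the behavior with injected languages (assuming a Markdown
    /// buffer that contains a fenced Rust code block):
    /// ```ignore
    /// // Inside the fenced block, the innermost syntax layer wins, so this
    /// // returns the Rust language; for an offset in the surrounding prose,
    /// // the buffer's primary language (Markdown) is returned instead.
    /// let language = buffer.language_at(offset_in_block);
    /// ```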
1487 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1488 let offset = position.to_offset(self);
1489 let mut is_first = true;
1490 let start_anchor = self.anchor_before(offset);
1491 let end_anchor = self.anchor_after(offset);
1492 self.syntax_map
1493 .lock()
1494 .layers_for_range(offset..offset, &self.text, false)
1495 .filter(|layer| {
1496 if is_first {
1497 is_first = false;
1498 return true;
1499 }
1500
1501 layer
1502 .included_sub_ranges
1503 .map(|sub_ranges| {
1504 sub_ranges.iter().any(|sub_range| {
1505 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1506 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1507 !is_before_start && !is_after_end
1508 })
1509 })
1510 .unwrap_or(true)
1511 })
1512 .last()
1513 .map(|info| info.language.clone())
1514 .or_else(|| self.language.clone())
1515 }
1516
1517 /// Returns each [`Language`] for the active syntax layers at the given location.
1518 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1519 let offset = position.to_offset(self);
1520 let mut languages: Vec<Arc<Language>> = self
1521 .syntax_map
1522 .lock()
1523 .layers_for_range(offset..offset, &self.text, false)
1524 .map(|info| info.language.clone())
1525 .collect();
1526
1527 if languages.is_empty()
1528 && let Some(buffer_language) = self.language()
1529 {
1530 languages.push(buffer_language.clone());
1531 }
1532
1533 languages
1534 }
1535
1536 /// An integer version number that accounts for all updates besides
1537 /// the buffer's text itself (which is versioned via a version vector).
1538 pub fn non_text_state_update_count(&self) -> usize {
1539 self.non_text_state_update_count
1540 }
1541
1542 /// Whether the buffer is being parsed in the background.
1543 #[cfg(any(test, feature = "test-support"))]
1544 pub fn is_parsing(&self) -> bool {
1545 self.reparse.is_some()
1546 }
1547
1548 /// Indicates whether the buffer contains any regions that may be
1549 /// written in a language that hasn't been loaded yet.
1550 pub fn contains_unknown_injections(&self) -> bool {
1551 self.syntax_map.lock().contains_unknown_injections()
1552 }
1553
1554 #[cfg(any(test, feature = "test-support"))]
1555 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1556 self.sync_parse_timeout = timeout;
1557 }
1558
1559 /// Called after an edit to synchronize the buffer's main parse tree with
1560 /// the buffer's new underlying state.
1561 ///
1562 /// Locks the syntax map and interpolates the edits since the last reparse
1563 /// into the foreground syntax tree.
1564 ///
1565 /// Then takes a stable snapshot of the syntax map before unlocking it.
1566 /// The snapshot with the interpolated edits is sent to a background thread,
1567 /// where we ask Tree-sitter to perform an incremental parse.
1568 ///
1569 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1570 /// (`sync_parse_timeout`) waiting for the parse to complete. If it finishes
1571 /// within that window, we proceed synchronously.
1572 ///
1573 /// If we time out waiting on the parse, we return with the interpolated tree
1574 /// still in the foreground and spawn a second task that waits for the parse
1575 /// to complete. When the background parse completes, it calls back into the
1576 /// main thread and installs the new parse state.
1577 ///
1578 /// If the buffer or grammar changed since the start of the background parse,
1579 /// initiate an additional reparse recursively. To avoid concurrent parses
1580 /// for the same buffer, we only initiate a new parse if we are not already
1581 /// parsing in the background.
1582 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1583 if self.reparse.is_some() {
1584 return;
1585 }
1586 let language = if let Some(language) = self.language.clone() {
1587 language
1588 } else {
1589 return;
1590 };
1591
1592 let text = self.text_snapshot();
1593 let parsed_version = self.version();
1594
1595 let mut syntax_map = self.syntax_map.lock();
1596 syntax_map.interpolate(&text);
1597 let language_registry = syntax_map.language_registry();
1598 let mut syntax_snapshot = syntax_map.snapshot();
1599 drop(syntax_map);
1600
1601 let parse_task = cx.background_spawn({
1602 let language = language.clone();
1603 let language_registry = language_registry.clone();
1604 async move {
1605 syntax_snapshot.reparse(&text, language_registry, language);
1606 syntax_snapshot
1607 }
1608 });
1609
1610 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1611 match cx
1612 .background_executor()
1613 .block_with_timeout(self.sync_parse_timeout, parse_task)
1614 {
1615 Ok(new_syntax_snapshot) => {
1616 self.did_finish_parsing(new_syntax_snapshot, cx);
1617 self.reparse = None;
1618 }
1619 Err(parse_task) => {
1620 // todo(lw): hot foreground spawn
1621 self.reparse = Some(cx.spawn(async move |this, cx| {
1622 let new_syntax_map = cx.background_spawn(parse_task).await;
1623 this.update(cx, move |this, cx| {
1624 let grammar_changed = || {
1625 this.language.as_ref().is_none_or(|current_language| {
1626 !Arc::ptr_eq(&language, current_language)
1627 })
1628 };
1629 let language_registry_changed = || {
1630 new_syntax_map.contains_unknown_injections()
1631 && language_registry.is_some_and(|registry| {
1632 registry.version() != new_syntax_map.language_registry_version()
1633 })
1634 };
1635 let parse_again = this.version.changed_since(&parsed_version)
1636 || language_registry_changed()
1637 || grammar_changed();
1638 this.did_finish_parsing(new_syntax_map, cx);
1639 this.reparse = None;
1640 if parse_again {
1641 this.reparse(cx);
1642 }
1643 })
1644 .ok();
1645 }));
1646 }
1647 }
1648 }
1649
1650 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1651 self.was_changed();
1652 self.non_text_state_update_count += 1;
1653 self.syntax_map.lock().did_parse(syntax_snapshot);
1654 self.request_autoindent(cx);
1655 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1656 self.tree_sitter_data.lock().clear();
1657 cx.emit(BufferEvent::Reparsed);
1658 cx.notify();
1659 }
1660
1661 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1662 self.parse_status.1.clone()
1663 }
1664
1665 /// Assign to the buffer a set of diagnostics created by a given language server.
1666 pub fn update_diagnostics(
1667 &mut self,
1668 server_id: LanguageServerId,
1669 diagnostics: DiagnosticSet,
1670 cx: &mut Context<Self>,
1671 ) {
1672 let lamport_timestamp = self.text.lamport_clock.tick();
1673 let op = Operation::UpdateDiagnostics {
1674 server_id,
1675 diagnostics: diagnostics.iter().cloned().collect(),
1676 lamport_timestamp,
1677 };
1678
1679 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1680 self.send_operation(op, true, cx);
1681 }
1682
1683 pub fn buffer_diagnostics(
1684 &self,
1685 for_server: Option<LanguageServerId>,
1686 ) -> Vec<&DiagnosticEntry<Anchor>> {
1687 match for_server {
1688 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1689 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1690 Err(_) => Vec::new(),
1691 },
1692 None => self
1693 .diagnostics
1694 .iter()
1695 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1696 .collect(),
1697 }
1698 }
1699
1700 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1701 if let Some(indent_sizes) = self.compute_autoindents() {
1702 let indent_sizes = cx.background_spawn(indent_sizes);
1703 match cx
1704 .background_executor()
1705 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1706 {
1707 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1708 Err(indent_sizes) => {
1709 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1710 let indent_sizes = indent_sizes.await;
1711 this.update(cx, |this, cx| {
1712 this.apply_autoindents(indent_sizes, cx);
1713 })
1714 .ok();
1715 }));
1716 }
1717 }
1718 } else {
1719 self.autoindent_requests.clear();
1720 for tx in self.wait_for_autoindent_txs.drain(..) {
1721 tx.send(()).ok();
1722 }
1723 }
1724 }
1725
1726 fn compute_autoindents(
1727 &self,
1728 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1729 let max_rows_between_yields = 100;
1730 let snapshot = self.snapshot();
1731 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1732 return None;
1733 }
1734
1735 let autoindent_requests = self.autoindent_requests.clone();
1736 Some(async move {
1737 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1738 for request in autoindent_requests {
1739 // Resolve each edited range to its row in the current buffer and in the
1740 // buffer before this batch of edits.
1741 let mut row_ranges = Vec::new();
1742 let mut old_to_new_rows = BTreeMap::new();
1743 let mut language_indent_sizes_by_new_row = Vec::new();
1744 for entry in &request.entries {
1745 let position = entry.range.start;
1746 let new_row = position.to_point(&snapshot).row;
1747 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1748 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1749
1750 if !entry.first_line_is_new {
1751 let old_row = position.to_point(&request.before_edit).row;
1752 old_to_new_rows.insert(old_row, new_row);
1753 }
1754 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1755 }
1756
1757 // Build a map containing the suggested indentation for each of the edited lines
1758 // with respect to the state of the buffer before these edits. This map is keyed
1759 // by the rows for these lines in the current state of the buffer.
1760 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1761 let old_edited_ranges =
1762 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1763 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1764 let mut language_indent_size = IndentSize::default();
1765 for old_edited_range in old_edited_ranges {
1766 let suggestions = request
1767 .before_edit
1768 .suggest_autoindents(old_edited_range.clone())
1769 .into_iter()
1770 .flatten();
1771 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1772 if let Some(suggestion) = suggestion {
1773 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1774
1775 // Find the indent size based on the language for this row.
1776 while let Some((row, size)) = language_indent_sizes.peek() {
1777 if *row > new_row {
1778 break;
1779 }
1780 language_indent_size = *size;
1781 language_indent_sizes.next();
1782 }
1783
1784 let suggested_indent = old_to_new_rows
1785 .get(&suggestion.basis_row)
1786 .and_then(|from_row| {
1787 Some(old_suggestions.get(from_row).copied()?.0)
1788 })
1789 .unwrap_or_else(|| {
1790 request
1791 .before_edit
1792 .indent_size_for_line(suggestion.basis_row)
1793 })
1794 .with_delta(suggestion.delta, language_indent_size);
1795 old_suggestions
1796 .insert(new_row, (suggested_indent, suggestion.within_error));
1797 }
1798 }
1799 yield_now().await;
1800 }
1801
1802 // Compute new suggestions for each line, but only include them in the result
1803 // if they differ from the old suggestion for that line.
1804 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1805 let mut language_indent_size = IndentSize::default();
1806 for (row_range, original_indent_column) in row_ranges {
1807 let new_edited_row_range = if request.is_block_mode {
1808 row_range.start..row_range.start + 1
1809 } else {
1810 row_range.clone()
1811 };
1812
1813 let suggestions = snapshot
1814 .suggest_autoindents(new_edited_row_range.clone())
1815 .into_iter()
1816 .flatten();
1817 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1818 if let Some(suggestion) = suggestion {
1819 // Find the indent size based on the language for this row.
1820 while let Some((row, size)) = language_indent_sizes.peek() {
1821 if *row > new_row {
1822 break;
1823 }
1824 language_indent_size = *size;
1825 language_indent_sizes.next();
1826 }
1827
1828 let suggested_indent = indent_sizes
1829 .get(&suggestion.basis_row)
1830 .copied()
1831 .map(|e| e.0)
1832 .unwrap_or_else(|| {
1833 snapshot.indent_size_for_line(suggestion.basis_row)
1834 })
1835 .with_delta(suggestion.delta, language_indent_size);
1836
1837 if old_suggestions.get(&new_row).is_none_or(
1838 |(old_indentation, was_within_error)| {
1839 suggested_indent != *old_indentation
1840 && (!suggestion.within_error || *was_within_error)
1841 },
1842 ) {
1843 indent_sizes.insert(
1844 new_row,
1845 (suggested_indent, request.ignore_empty_lines),
1846 );
1847 }
1848 }
1849 }
1850
1851 if let (true, Some(original_indent_column)) =
1852 (request.is_block_mode, original_indent_column)
1853 {
1854 let new_indent =
1855 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1856 *indent
1857 } else {
1858 snapshot.indent_size_for_line(row_range.start)
1859 };
1860 let delta = new_indent.len as i64 - original_indent_column as i64;
1861 if delta != 0 {
1862 for row in row_range.skip(1) {
1863 indent_sizes.entry(row).or_insert_with(|| {
1864 let mut size = snapshot.indent_size_for_line(row);
1865 if size.kind == new_indent.kind {
1866 match delta.cmp(&0) {
1867 Ordering::Greater => size.len += delta as u32,
1868 Ordering::Less => {
1869 size.len = size.len.saturating_sub(-delta as u32)
1870 }
1871 Ordering::Equal => {}
1872 }
1873 }
1874 (size, request.ignore_empty_lines)
1875 });
1876 }
1877 }
1878 }
1879
1880 yield_now().await;
1881 }
1882 }
1883
1884 indent_sizes
1885 .into_iter()
1886 .filter_map(|(row, (indent, ignore_empty_lines))| {
1887 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1888 None
1889 } else {
1890 Some((row, indent))
1891 }
1892 })
1893 .collect()
1894 })
1895 }
1896
1897 fn apply_autoindents(
1898 &mut self,
1899 indent_sizes: BTreeMap<u32, IndentSize>,
1900 cx: &mut Context<Self>,
1901 ) {
1902 self.autoindent_requests.clear();
1903 for tx in self.wait_for_autoindent_txs.drain(..) {
1904 tx.send(()).ok();
1905 }
1906
1907 let edits: Vec<_> = indent_sizes
1908 .into_iter()
1909 .filter_map(|(row, indent_size)| {
1910 let current_size = indent_size_for_line(self, row);
1911 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1912 })
1913 .collect();
1914
1915 let preserve_preview = self.preserve_preview();
1916 self.edit(edits, None, cx);
1917 if preserve_preview {
1918 self.refresh_preview();
1919 }
1920 }
1921
1922 /// Create a minimal edit that will cause the given row to be indented
1923 /// with the given size. After applying this edit, the length of the line
1924 /// will always be at least `new_size.len`.
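    ///
    /// A minimal illustrative sketch (not a compiled doctest; it assumes
    /// `IndentSize::spaces` constructs a space-based indent, as used elsewhere in
    /// this module):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0 of row 3.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```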
1925 pub fn edit_for_indent_size_adjustment(
1926 row: u32,
1927 current_size: IndentSize,
1928 new_size: IndentSize,
1929 ) -> Option<(Range<Point>, String)> {
1930 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1932 Ordering::Greater => {
1933 let point = Point::new(row, 0);
1934 Some((
1935 point..point,
1936 iter::repeat(new_size.char())
1937 .take((new_size.len - current_size.len) as usize)
1938 .collect::<String>(),
1939 ))
1940 }
1941
1942 Ordering::Less => Some((
1943 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1944 String::new(),
1945 )),
1946
1947 Ordering::Equal => None,
1948 }
1949 } else {
1950 Some((
1951 Point::new(row, 0)..Point::new(row, current_size.len),
1952 iter::repeat(new_size.char())
1953 .take(new_size.len as usize)
1954 .collect::<String>(),
1955 ))
1956 }
1957 }
1958
1959 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1960 /// and the given new text.
1961 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1962 let old_text = self.as_rope().clone();
1963 let base_version = self.version();
1964 cx.background_executor()
1965 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1966 let old_text = old_text.to_string();
1967 let line_ending = LineEnding::detect(&new_text);
1968 LineEnding::normalize(&mut new_text);
1969 let edits = text_diff(&old_text, &new_text);
1970 Diff {
1971 base_version,
1972 line_ending,
1973 edits,
1974 }
1975 })
1976 }
1977
1978 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1980 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1981 let old_text = self.as_rope().clone();
1982 let line_ending = self.line_ending();
1983 let base_version = self.version();
1984 cx.background_spawn(async move {
1985 let ranges = trailing_whitespace_ranges(&old_text);
1986 let empty = Arc::<str>::from("");
1987 Diff {
1988 base_version,
1989 line_ending,
1990 edits: ranges
1991 .into_iter()
1992 .map(|range| (range, empty.clone()))
1993 .collect(),
1994 }
1995 })
1996 }
1997
1998 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
2000 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2001 let len = self.len();
2002 if len == 0 {
2003 return;
2004 }
2005 let mut offset = len;
2006 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2007 let non_whitespace_len = chunk
2008 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2009 .len();
2010 offset -= chunk.len();
2011 offset += non_whitespace_len;
2012 if non_whitespace_len != 0 {
2013 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2014 return;
2015 }
2016 break;
2017 }
2018 }
2019 self.edit([(offset..len, "\n")], None, cx);
2020 }
2021
2022 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjusts the diff to account for those changes, and discards any
2024 /// parts of the diff that conflict with those changes.
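    ///
    /// An illustrative sketch of the diff-then-apply flow (not a compiled doctest;
    /// it assumes `new_text: String` and access to the buffer with an appropriate
    /// gpui context at each step):
    ///
    /// ```ignore
    /// // 1. Kick off the diff computation; it runs on the background executor.
    /// let diff_task = buffer.diff(new_text, cx);
    /// // 2. Once the task resolves, apply it back to the buffer. Edits made in the
    /// //    meantime are adjusted for, and conflicting hunks are dropped.
    /// let diff = diff_task.await;
    /// buffer.apply_diff(diff, cx);
    /// ```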
2025 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2026 let snapshot = self.snapshot();
2027 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2028 let mut delta = 0;
2029 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2030 while let Some(edit_since) = edits_since.peek() {
2031 // If the edit occurs after a diff hunk, then it does not
2032 // affect that hunk.
2033 if edit_since.old.start > range.end {
2034 break;
2035 }
2036 // If the edit precedes the diff hunk, then adjust the hunk
2037 // to reflect the edit.
2038 else if edit_since.old.end < range.start {
2039 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2040 edits_since.next();
2041 }
2042 // If the edit intersects a diff hunk, then discard that hunk.
2043 else {
2044 return None;
2045 }
2046 }
2047
2048 let start = (range.start as i64 + delta) as usize;
2049 let end = (range.end as i64 + delta) as usize;
2050 Some((start..end, new_text))
2051 });
2052
2053 self.start_transaction();
2054 self.text.set_line_ending(diff.line_ending);
2055 self.edit(adjusted_edits, None, cx);
2056 self.end_transaction(cx)
2057 }
2058
2059 pub fn has_unsaved_edits(&self) -> bool {
2060 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2061
2062 if last_version == self.version {
2063 self.has_unsaved_edits
2064 .set((last_version, has_unsaved_edits));
2065 return has_unsaved_edits;
2066 }
2067
2068 let has_edits = self.has_edits_since(&self.saved_version);
2069 self.has_unsaved_edits
2070 .set((self.version.clone(), has_edits));
2071 has_edits
2072 }
2073
2074 /// Checks if the buffer has unsaved changes.
2075 pub fn is_dirty(&self) -> bool {
2076 if self.capability == Capability::ReadOnly {
2077 return false;
2078 }
2079 if self.has_conflict {
2080 return true;
2081 }
2082 match self.file.as_ref().map(|f| f.disk_state()) {
2083 Some(DiskState::New) | Some(DiskState::Deleted) => {
2084 !self.is_empty() && self.has_unsaved_edits()
2085 }
2086 _ => self.has_unsaved_edits(),
2087 }
2088 }
2089
2090 /// Checks if the buffer and its file have both changed since the buffer
2091 /// was last saved or reloaded.
2092 pub fn has_conflict(&self) -> bool {
2093 if self.has_conflict {
2094 return true;
2095 }
2096 let Some(file) = self.file.as_ref() else {
2097 return false;
2098 };
2099 match file.disk_state() {
2100 DiskState::New => false,
2101 DiskState::Present { mtime } => match self.saved_mtime {
2102 Some(saved_mtime) => {
2103 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2104 }
2105 None => true,
2106 },
2107 DiskState::Deleted => false,
2108 }
2109 }
2110
2111 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2112 pub fn subscribe(&mut self) -> Subscription {
2113 self.text.subscribe()
2114 }
2115
2116 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2117 ///
2118 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
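    ///
    /// An illustrative sketch (not a compiled doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...edits may happen here...
    /// if changed.take() {
    ///     // The buffer's text changed since the bit was registered (or last cleared).
    /// }
    /// ```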
2120 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2121 if let Err(ix) = self
2122 .change_bits
2123 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2124 {
2125 self.change_bits.insert(ix, bit);
2126 }
2127 }
2128
2129 /// Set the change bit for all "listeners".
2130 fn was_changed(&mut self) {
2131 self.change_bits.retain(|change_bit| {
2132 change_bit
2133 .upgrade()
2134 .inspect(|bit| {
2135 _ = bit.replace(true);
2136 })
2137 .is_some()
2138 });
2139 }
2140
2141 /// Starts a transaction, if one is not already in-progress. When undoing or
2142 /// redoing edits, all of the edits performed within a transaction are undone
2143 /// or redone together.
2144 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2145 self.start_transaction_at(Instant::now())
2146 }
2147
2148 /// Starts a transaction, providing the current time. Subsequent transactions
2149 /// that occur within a short period of time will be grouped together. This
2150 /// is controlled by the buffer's undo grouping duration.
2151 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2152 self.transaction_depth += 1;
2153 if self.was_dirty_before_starting_transaction.is_none() {
2154 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2155 }
2156 self.text.start_transaction_at(now)
2157 }
2158
2159 /// Terminates the current transaction, if this is the outermost transaction.
2160 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2161 self.end_transaction_at(Instant::now(), cx)
2162 }
2163
2164 /// Terminates the current transaction, providing the current time. Subsequent transactions
2165 /// that occur within a short period of time will be grouped together. This
2166 /// is controlled by the buffer's undo grouping duration.
2167 pub fn end_transaction_at(
2168 &mut self,
2169 now: Instant,
2170 cx: &mut Context<Self>,
2171 ) -> Option<TransactionId> {
2172 assert!(self.transaction_depth > 0);
2173 self.transaction_depth -= 1;
2174 let was_dirty = if self.transaction_depth == 0 {
2175 self.was_dirty_before_starting_transaction.take().unwrap()
2176 } else {
2177 false
2178 };
2179 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2180 self.did_edit(&start_version, was_dirty, cx);
2181 Some(transaction_id)
2182 } else {
2183 None
2184 }
2185 }
2186
2187 /// Manually add a transaction to the buffer's undo history.
2188 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2189 self.text.push_transaction(transaction, now);
2190 }
2191
2192 /// Differs from `push_transaction` in that it does not clear the redo
2193 /// stack. Intended to be used to create a parent transaction to merge
2194 /// potential child transactions into.
2195 ///
2196 /// The caller is responsible for removing it from the undo history using
2197 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2198 /// are merged into this transaction, the caller is responsible for ensuring
2199 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2200 /// cleared is to create transactions with the usual `start_transaction` and
2201 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
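    ///
    /// An illustrative sketch of that flow (not a compiled doctest; it assumes a
    /// `buffer: &mut Buffer` and a gpui `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    ///
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "child edit")], None, cx);
    /// match buffer.end_transaction(cx) {
    ///     // Merging the child into the placeholder keeps its edits undoable as one step.
    ///     Some(child) => buffer.merge_transactions(child, parent),
    ///     // Nothing was merged, so remove the placeholder from the undo history.
    ///     None => {
    ///         buffer.forget_transaction(parent);
    ///     }
    /// }
    /// ```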
2203 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2204 self.text.push_empty_transaction(now)
2205 }
2206
2207 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2209 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2210 self.text.finalize_last_transaction()
2211 }
2212
2213 /// Manually group all changes since a given transaction.
2214 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2215 self.text.group_until_transaction(transaction_id);
2216 }
2217
    /// Manually remove a transaction from the buffer's undo history.
2219 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2220 self.text.forget_transaction(transaction_id)
2221 }
2222
    /// Retrieve a transaction from the buffer's undo history.
2224 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2225 self.text.get_transaction(transaction_id)
2226 }
2227
2228 /// Manually merge two transactions in the buffer's undo history.
2229 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2230 self.text.merge_transactions(transaction, destination);
2231 }
2232
2233 /// Waits for the buffer to receive operations with the given timestamps.
2234 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2235 &mut self,
2236 edit_ids: It,
2237 ) -> impl Future<Output = Result<()>> + use<It> {
2238 self.text.wait_for_edits(edit_ids)
2239 }
2240
2241 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2242 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2243 &mut self,
2244 anchors: It,
2245 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2246 self.text.wait_for_anchors(anchors)
2247 }
2248
2249 /// Waits for the buffer to receive operations up to the given version.
2250 pub fn wait_for_version(
2251 &mut self,
2252 version: clock::Global,
2253 ) -> impl Future<Output = Result<()>> + use<> {
2254 self.text.wait_for_version(version)
2255 }
2256
2257 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2259 pub fn give_up_waiting(&mut self) {
2260 self.text.give_up_waiting();
2261 }
2262
2263 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2264 let mut rx = None;
2265 if !self.autoindent_requests.is_empty() {
2266 let channel = oneshot::channel();
2267 self.wait_for_autoindent_txs.push(channel.0);
2268 rx = Some(channel.1);
2269 }
2270 rx
2271 }
2272
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2274 pub fn set_active_selections(
2275 &mut self,
2276 selections: Arc<[Selection<Anchor>]>,
2277 line_mode: bool,
2278 cursor_shape: CursorShape,
2279 cx: &mut Context<Self>,
2280 ) {
2281 let lamport_timestamp = self.text.lamport_clock.tick();
2282 self.remote_selections.insert(
2283 self.text.replica_id(),
2284 SelectionSet {
2285 selections: selections.clone(),
2286 lamport_timestamp,
2287 line_mode,
2288 cursor_shape,
2289 },
2290 );
2291 self.send_operation(
2292 Operation::UpdateSelections {
2293 selections,
2294 line_mode,
2295 lamport_timestamp,
2296 cursor_shape,
2297 },
2298 true,
2299 cx,
2300 );
2301 self.non_text_state_update_count += 1;
2302 cx.notify();
2303 }
2304
2305 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2306 /// this replica.
2307 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2308 if self
2309 .remote_selections
2310 .get(&self.text.replica_id())
2311 .is_none_or(|set| !set.selections.is_empty())
2312 {
2313 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2314 }
2315 }
2316
2317 pub fn set_agent_selections(
2318 &mut self,
2319 selections: Arc<[Selection<Anchor>]>,
2320 line_mode: bool,
2321 cursor_shape: CursorShape,
2322 cx: &mut Context<Self>,
2323 ) {
2324 let lamport_timestamp = self.text.lamport_clock.tick();
2325 self.remote_selections.insert(
2326 ReplicaId::AGENT,
2327 SelectionSet {
2328 selections,
2329 lamport_timestamp,
2330 line_mode,
2331 cursor_shape,
2332 },
2333 );
2334 self.non_text_state_update_count += 1;
2335 cx.notify();
2336 }
2337
2338 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2339 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2340 }
2341
2342 /// Replaces the buffer's entire text.
2343 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2344 where
2345 T: Into<Arc<str>>,
2346 {
2347 self.autoindent_requests.clear();
2348 self.edit([(0..self.len(), text)], None, cx)
2349 }
2350
2351 /// Appends the given text to the end of the buffer.
2352 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2353 where
2354 T: Into<Arc<str>>,
2355 {
2356 self.edit([(self.len()..self.len(), text)], None, cx)
2357 }
2358
2359 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2360 /// delete, and a string of text to insert at that location.
2361 ///
2362 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2363 /// request for the edited ranges, which will be processed when the buffer finishes
2364 /// parsing.
2365 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
2367 /// or asynchronously, depending on the changes.
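    ///
    /// A minimal usage sketch (not a compiled doctest; it assumes a `buffer: &mut Buffer`
    /// and a gpui `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert at offset 10, auto-indenting
    /// // every line touched by the edits.
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n    value\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```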
2368 pub fn edit<I, S, T>(
2369 &mut self,
2370 edits_iter: I,
2371 autoindent_mode: Option<AutoindentMode>,
2372 cx: &mut Context<Self>,
2373 ) -> Option<clock::Lamport>
2374 where
2375 I: IntoIterator<Item = (Range<S>, T)>,
2376 S: ToOffset,
2377 T: Into<Arc<str>>,
2378 {
2379 // Skip invalid edits and coalesce contiguous ones.
2380 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2381
2382 for (range, new_text) in edits_iter {
2383 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2384
2385 if range.start > range.end {
2386 mem::swap(&mut range.start, &mut range.end);
2387 }
2388 let new_text = new_text.into();
2389 if !new_text.is_empty() || !range.is_empty() {
2390 if let Some((prev_range, prev_text)) = edits.last_mut()
2391 && prev_range.end >= range.start
2392 {
2393 prev_range.end = cmp::max(prev_range.end, range.end);
2394 *prev_text = format!("{prev_text}{new_text}").into();
2395 } else {
2396 edits.push((range, new_text));
2397 }
2398 }
2399 }
2400 if edits.is_empty() {
2401 return None;
2402 }
2403
2404 self.start_transaction();
2405 self.pending_autoindent.take();
2406 let autoindent_request = autoindent_mode
2407 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2408
2409 let edit_operation = self.text.edit(edits.iter().cloned());
2410 let edit_id = edit_operation.timestamp();
2411
2412 if let Some((before_edit, mode)) = autoindent_request {
2413 let mut delta = 0isize;
2414 let mut previous_setting = None;
2415 let entries: Vec<_> = edits
2416 .into_iter()
2417 .enumerate()
2418 .zip(&edit_operation.as_edit().unwrap().new_text)
2419 .filter(|((_, (range, _)), _)| {
2420 let language = before_edit.language_at(range.start);
2421 let language_id = language.map(|l| l.id());
2422 if let Some((cached_language_id, auto_indent)) = previous_setting
2423 && cached_language_id == language_id
2424 {
2425 auto_indent
2426 } else {
2427 // The auto-indent setting is not present in editorconfigs, hence
2428 // we can avoid passing the file here.
2429 let auto_indent =
2430 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2431 previous_setting = Some((language_id, auto_indent));
2432 auto_indent
2433 }
2434 })
2435 .map(|((ix, (range, _)), new_text)| {
2436 let new_text_length = new_text.len();
2437 let old_start = range.start.to_point(&before_edit);
2438 let new_start = (delta + range.start as isize) as usize;
2439 let range_len = range.end - range.start;
2440 delta += new_text_length as isize - range_len as isize;
2441
2442 // Decide what range of the insertion to auto-indent, and whether
2443 // the first line of the insertion should be considered a newly-inserted line
2444 // or an edit to an existing line.
2445 let mut range_of_insertion_to_indent = 0..new_text_length;
2446 let mut first_line_is_new = true;
2447
2448 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2449 let old_line_end = before_edit.line_len(old_start.row);
2450
2451 if old_start.column > old_line_start {
2452 first_line_is_new = false;
2453 }
2454
2455 if !new_text.contains('\n')
2456 && (old_start.column + (range_len as u32) < old_line_end
2457 || old_line_end == old_line_start)
2458 {
2459 first_line_is_new = false;
2460 }
2461
2462 // When inserting text starting with a newline, avoid auto-indenting the
2463 // previous line.
2464 if new_text.starts_with('\n') {
2465 range_of_insertion_to_indent.start += 1;
2466 first_line_is_new = true;
2467 }
2468
2469 let mut original_indent_column = None;
2470 if let AutoindentMode::Block {
2471 original_indent_columns,
2472 } = &mode
2473 {
2474 original_indent_column = Some(if new_text.starts_with('\n') {
2475 indent_size_for_text(
2476 new_text[range_of_insertion_to_indent.clone()].chars(),
2477 )
2478 .len
2479 } else {
2480 original_indent_columns
2481 .get(ix)
2482 .copied()
2483 .flatten()
2484 .unwrap_or_else(|| {
2485 indent_size_for_text(
2486 new_text[range_of_insertion_to_indent.clone()].chars(),
2487 )
2488 .len
2489 })
2490 });
2491
2492 // Avoid auto-indenting the line after the edit.
2493 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2494 range_of_insertion_to_indent.end -= 1;
2495 }
2496 }
2497
2498 AutoindentRequestEntry {
2499 first_line_is_new,
2500 original_indent_column,
2501 indent_size: before_edit.language_indent_size_at(range.start, cx),
2502 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2503 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2504 }
2505 })
2506 .collect();
2507
2508 if !entries.is_empty() {
2509 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2510 before_edit,
2511 entries,
2512 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2513 ignore_empty_lines: false,
2514 }));
2515 }
2516 }
2517
2518 self.end_transaction(cx);
2519 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2520 Some(edit_id)
2521 }
2522
2523 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2524 self.was_changed();
2525
2526 if self.edits_since::<usize>(old_version).next().is_none() {
2527 return;
2528 }
2529
2530 self.reparse(cx);
2531 cx.emit(BufferEvent::Edited);
2532 if was_dirty != self.is_dirty() {
2533 cx.emit(BufferEvent::DirtyChanged);
2534 }
2535 cx.notify();
2536 }
2537
2538 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2539 where
2540 I: IntoIterator<Item = Range<T>>,
2541 T: ToOffset + Copy,
2542 {
2543 let before_edit = self.snapshot();
2544 let entries = ranges
2545 .into_iter()
2546 .map(|range| AutoindentRequestEntry {
2547 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2548 first_line_is_new: true,
2549 indent_size: before_edit.language_indent_size_at(range.start, cx),
2550 original_indent_column: None,
2551 })
2552 .collect();
2553 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2554 before_edit,
2555 entries,
2556 is_block_mode: false,
2557 ignore_empty_lines: true,
2558 }));
2559 self.request_autoindent(cx);
2560 }
2561
2562 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2563 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2564 pub fn insert_empty_line(
2565 &mut self,
2566 position: impl ToPoint,
2567 space_above: bool,
2568 space_below: bool,
2569 cx: &mut Context<Self>,
2570 ) -> Point {
2571 let mut position = position.to_point(self);
2572
2573 self.start_transaction();
2574
2575 self.edit(
2576 [(position..position, "\n")],
2577 Some(AutoindentMode::EachLine),
2578 cx,
2579 );
2580
2581 if position.column > 0 {
2582 position += Point::new(1, 0);
2583 }
2584
2585 if !self.is_line_blank(position.row) {
2586 self.edit(
2587 [(position..position, "\n")],
2588 Some(AutoindentMode::EachLine),
2589 cx,
2590 );
2591 }
2592
2593 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2594 self.edit(
2595 [(position..position, "\n")],
2596 Some(AutoindentMode::EachLine),
2597 cx,
2598 );
2599 position.row += 1;
2600 }
2601
2602 if space_below
2603 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2604 {
2605 self.edit(
2606 [(position..position, "\n")],
2607 Some(AutoindentMode::EachLine),
2608 cx,
2609 );
2610 }
2611
2612 self.end_transaction(cx);
2613
2614 position
2615 }
2616
2617 /// Applies the given remote operations to the buffer.
2618 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2619 self.pending_autoindent.take();
2620 let was_dirty = self.is_dirty();
2621 let old_version = self.version.clone();
2622 let mut deferred_ops = Vec::new();
2623 let buffer_ops = ops
2624 .into_iter()
2625 .filter_map(|op| match op {
2626 Operation::Buffer(op) => Some(op),
2627 _ => {
2628 if self.can_apply_op(&op) {
2629 self.apply_op(op, cx);
2630 } else {
2631 deferred_ops.push(op);
2632 }
2633 None
2634 }
2635 })
2636 .collect::<Vec<_>>();
2637 for operation in buffer_ops.iter() {
2638 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2639 }
2640 self.text.apply_ops(buffer_ops);
2641 self.deferred_ops.insert(deferred_ops);
2642 self.flush_deferred_ops(cx);
2643 self.did_edit(&old_version, was_dirty, cx);
2644 // Notify independently of whether the buffer was edited as the operations could include a
2645 // selection update.
2646 cx.notify();
2647 }
2648
2649 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2650 let mut deferred_ops = Vec::new();
2651 for op in self.deferred_ops.drain().iter().cloned() {
2652 if self.can_apply_op(&op) {
2653 self.apply_op(op, cx);
2654 } else {
2655 deferred_ops.push(op);
2656 }
2657 }
2658 self.deferred_ops.insert(deferred_ops);
2659 }
2660
2661 pub fn has_deferred_ops(&self) -> bool {
2662 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2663 }
2664
2665 fn can_apply_op(&self, operation: &Operation) -> bool {
2666 match operation {
2667 Operation::Buffer(_) => {
2668 unreachable!("buffer operations should never be applied at this layer")
2669 }
2670 Operation::UpdateDiagnostics {
2671 diagnostics: diagnostic_set,
2672 ..
2673 } => diagnostic_set.iter().all(|diagnostic| {
2674 self.text.can_resolve(&diagnostic.range.start)
2675 && self.text.can_resolve(&diagnostic.range.end)
2676 }),
2677 Operation::UpdateSelections { selections, .. } => selections
2678 .iter()
2679 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2680 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2681 }
2682 }
2683
2684 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2685 match operation {
2686 Operation::Buffer(_) => {
2687 unreachable!("buffer operations should never be applied at this layer")
2688 }
2689 Operation::UpdateDiagnostics {
2690 server_id,
2691 diagnostics: diagnostic_set,
2692 lamport_timestamp,
2693 } => {
2694 let snapshot = self.snapshot();
2695 self.apply_diagnostic_update(
2696 server_id,
2697 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2698 lamport_timestamp,
2699 cx,
2700 );
2701 }
2702 Operation::UpdateSelections {
2703 selections,
2704 lamport_timestamp,
2705 line_mode,
2706 cursor_shape,
2707 } => {
2708 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2709 && set.lamport_timestamp > lamport_timestamp
2710 {
2711 return;
2712 }
2713
2714 self.remote_selections.insert(
2715 lamport_timestamp.replica_id,
2716 SelectionSet {
2717 selections,
2718 lamport_timestamp,
2719 line_mode,
2720 cursor_shape,
2721 },
2722 );
2723 self.text.lamport_clock.observe(lamport_timestamp);
2724 self.non_text_state_update_count += 1;
2725 }
2726 Operation::UpdateCompletionTriggers {
2727 triggers,
2728 lamport_timestamp,
2729 server_id,
2730 } => {
2731 if triggers.is_empty() {
2732 self.completion_triggers_per_language_server
2733 .remove(&server_id);
2734 self.completion_triggers = self
2735 .completion_triggers_per_language_server
2736 .values()
2737 .flat_map(|triggers| triggers.iter().cloned())
2738 .collect();
2739 } else {
2740 self.completion_triggers_per_language_server
2741 .insert(server_id, triggers.iter().cloned().collect());
2742 self.completion_triggers.extend(triggers);
2743 }
2744 self.text.lamport_clock.observe(lamport_timestamp);
2745 }
2746 Operation::UpdateLineEnding {
2747 line_ending,
2748 lamport_timestamp,
2749 } => {
2750 self.text.set_line_ending(line_ending);
2751 self.text.lamport_clock.observe(lamport_timestamp);
2752 }
2753 }
2754 }
2755
2756 fn apply_diagnostic_update(
2757 &mut self,
2758 server_id: LanguageServerId,
2759 diagnostics: DiagnosticSet,
2760 lamport_timestamp: clock::Lamport,
2761 cx: &mut Context<Self>,
2762 ) {
2763 if lamport_timestamp > self.diagnostics_timestamp {
2764 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2765 if diagnostics.is_empty() {
2766 if let Ok(ix) = ix {
2767 self.diagnostics.remove(ix);
2768 }
2769 } else {
2770 match ix {
2771 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2772 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2773 };
2774 }
2775 self.diagnostics_timestamp = lamport_timestamp;
2776 self.non_text_state_update_count += 1;
2777 self.text.lamport_clock.observe(lamport_timestamp);
2778 cx.notify();
2779 cx.emit(BufferEvent::DiagnosticsUpdated);
2780 }
2781 }
2782
2783 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2784 self.was_changed();
2785 cx.emit(BufferEvent::Operation {
2786 operation,
2787 is_local,
2788 });
2789 }
2790
2791 /// Removes the selections for a given peer.
2792 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2793 self.remote_selections.remove(&replica_id);
2794 cx.notify();
2795 }
2796
2797 /// Undoes the most recent transaction.
2798 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2799 let was_dirty = self.is_dirty();
2800 let old_version = self.version.clone();
2801
2802 if let Some((transaction_id, operation)) = self.text.undo() {
2803 self.send_operation(Operation::Buffer(operation), true, cx);
2804 self.did_edit(&old_version, was_dirty, cx);
2805 Some(transaction_id)
2806 } else {
2807 None
2808 }
2809 }
2810
2811 /// Manually undoes a specific transaction in the buffer's undo history.
2812 pub fn undo_transaction(
2813 &mut self,
2814 transaction_id: TransactionId,
2815 cx: &mut Context<Self>,
2816 ) -> bool {
2817 let was_dirty = self.is_dirty();
2818 let old_version = self.version.clone();
2819 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2820 self.send_operation(Operation::Buffer(operation), true, cx);
2821 self.did_edit(&old_version, was_dirty, cx);
2822 true
2823 } else {
2824 false
2825 }
2826 }
2827
2828 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2829 pub fn undo_to_transaction(
2830 &mut self,
2831 transaction_id: TransactionId,
2832 cx: &mut Context<Self>,
2833 ) -> bool {
2834 let was_dirty = self.is_dirty();
2835 let old_version = self.version.clone();
2836
2837 let operations = self.text.undo_to_transaction(transaction_id);
2838 let undone = !operations.is_empty();
2839 for operation in operations {
2840 self.send_operation(Operation::Buffer(operation), true, cx);
2841 }
2842 if undone {
2843 self.did_edit(&old_version, was_dirty, cx)
2844 }
2845 undone
2846 }
2847
2848 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2849 let was_dirty = self.is_dirty();
2850 let operation = self.text.undo_operations(counts);
2851 let old_version = self.version.clone();
2852 self.send_operation(Operation::Buffer(operation), true, cx);
2853 self.did_edit(&old_version, was_dirty, cx);
2854 }
2855
    /// Redoes the most recently undone transaction.
2857 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2858 let was_dirty = self.is_dirty();
2859 let old_version = self.version.clone();
2860
2861 if let Some((transaction_id, operation)) = self.text.redo() {
2862 self.send_operation(Operation::Buffer(operation), true, cx);
2863 self.did_edit(&old_version, was_dirty, cx);
2864 Some(transaction_id)
2865 } else {
2866 None
2867 }
2868 }
2869
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2871 pub fn redo_to_transaction(
2872 &mut self,
2873 transaction_id: TransactionId,
2874 cx: &mut Context<Self>,
2875 ) -> bool {
2876 let was_dirty = self.is_dirty();
2877 let old_version = self.version.clone();
2878
2879 let operations = self.text.redo_to_transaction(transaction_id);
2880 let redone = !operations.is_empty();
2881 for operation in operations {
2882 self.send_operation(Operation::Buffer(operation), true, cx);
2883 }
2884 if redone {
2885 self.did_edit(&old_version, was_dirty, cx)
2886 }
2887 redone
2888 }
2889
2890 /// Override current completion triggers with the user-provided completion triggers.
2891 pub fn set_completion_triggers(
2892 &mut self,
2893 server_id: LanguageServerId,
2894 triggers: BTreeSet<String>,
2895 cx: &mut Context<Self>,
2896 ) {
2897 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2898 if triggers.is_empty() {
2899 self.completion_triggers_per_language_server
2900 .remove(&server_id);
2901 self.completion_triggers = self
2902 .completion_triggers_per_language_server
2903 .values()
2904 .flat_map(|triggers| triggers.iter().cloned())
2905 .collect();
2906 } else {
2907 self.completion_triggers_per_language_server
2908 .insert(server_id, triggers.clone());
2909 self.completion_triggers.extend(triggers.iter().cloned());
2910 }
2911 self.send_operation(
2912 Operation::UpdateCompletionTriggers {
2913 triggers: triggers.into_iter().collect(),
2914 lamport_timestamp: self.completion_triggers_timestamp,
2915 server_id,
2916 },
2917 true,
2918 cx,
2919 );
2920 cx.notify();
2921 }
2922
2923 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2925 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2926 &self.completion_triggers
2927 }
2928
2929 /// Call this directly after performing edits to prevent the preview tab
2930 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2931 /// to return false until there are additional edits.
2932 pub fn refresh_preview(&mut self) {
2933 self.preview_version = self.version.clone();
2934 }
2935
2936 /// Whether we should preserve the preview status of a tab containing this buffer.
2937 pub fn preserve_preview(&self) -> bool {
2938 !self.has_edits_since(&self.preview_version)
2939 }
2940}
2941
2942#[doc(hidden)]
2943#[cfg(any(test, feature = "test-support"))]
2944impl Buffer {
2945 pub fn edit_via_marked_text(
2946 &mut self,
2947 marked_string: &str,
2948 autoindent_mode: Option<AutoindentMode>,
2949 cx: &mut Context<Self>,
2950 ) {
2951 let edits = self.edits_for_marked_text(marked_string);
2952 self.edit(edits, autoindent_mode, cx);
2953 }
2954
2955 pub fn set_group_interval(&mut self, group_interval: Duration) {
2956 self.text.set_group_interval(group_interval);
2957 }
2958
2959 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2960 where
2961 T: rand::Rng,
2962 {
2963 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2964 let mut last_end = None;
2965 for _ in 0..old_range_count {
2966 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2967 break;
2968 }
2969
2970 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2971 let mut range = self.random_byte_range(new_start, rng);
2972 if rng.random_bool(0.2) {
2973 mem::swap(&mut range.start, &mut range.end);
2974 }
2975 last_end = Some(range.end);
2976
2977 let new_text_len = rng.random_range(0..10);
2978 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2979 new_text = new_text.to_uppercase();
2980
2981 edits.push((range, new_text));
2982 }
2983 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2984 self.edit(edits, None, cx);
2985 }
2986
2987 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2988 let was_dirty = self.is_dirty();
2989 let old_version = self.version.clone();
2990
2991 let ops = self.text.randomly_undo_redo(rng);
2992 if !ops.is_empty() {
2993 for op in ops {
2994 self.send_operation(Operation::Buffer(op), true, cx);
2995 self.did_edit(&old_version, was_dirty, cx);
2996 }
2997 }
2998 }
2999}
3000
3001impl EventEmitter<BufferEvent> for Buffer {}
3002
3003impl Deref for Buffer {
3004 type Target = TextBuffer;
3005
3006 fn deref(&self) -> &Self::Target {
3007 &self.text
3008 }
3009}
3010
3011impl BufferSnapshot {
    /// Returns the [`IndentSize`] of the given line, as determined by its
    /// existing leading whitespace.
3014 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3015 indent_size_for_line(self, row)
3016 }
3017
3018 /// Returns [`IndentSize`] for a given position that respects user settings
3019 /// and language preferences.
3020 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3021 let settings = language_settings(
3022 self.language_at(position).map(|l| l.name()),
3023 self.file(),
3024 cx,
3025 );
3026 if settings.hard_tabs {
3027 IndentSize::tab()
3028 } else {
3029 IndentSize::spaces(settings.tab_size.get())
3030 }
3031 }
3032
3033 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3034 /// is passed in as `single_indent_size`.
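    ///
    /// An illustrative sketch (not a compiled doctest; it assumes a `snapshot:
    /// BufferSnapshot` whose language has indentation queries):
    ///
    /// ```ignore
    /// // Suggest indents for the first five rows, using a 4-space unit.
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     println!("row {row}: indent to {} columns", indent.len);
    /// }
    /// ```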
3035 pub fn suggested_indents(
3036 &self,
3037 rows: impl Iterator<Item = u32>,
3038 single_indent_size: IndentSize,
3039 ) -> BTreeMap<u32, IndentSize> {
3040 let mut result = BTreeMap::new();
3041
3042 for row_range in contiguous_ranges(rows, 10) {
3043 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3044 Some(suggestions) => suggestions,
3045 _ => break,
3046 };
3047
3048 for (row, suggestion) in row_range.zip(suggestions) {
3049 let indent_size = if let Some(suggestion) = suggestion {
3050 result
3051 .get(&suggestion.basis_row)
3052 .copied()
3053 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3054 .with_delta(suggestion.delta, single_indent_size)
3055 } else {
3056 self.indent_size_for_line(row)
3057 };
3058
3059 result.insert(row, indent_size);
3060 }
3061 }
3062
3063 result
3064 }
3065
3066 fn suggest_autoindents(
3067 &self,
3068 row_range: Range<u32>,
3069 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3070 let config = &self.language.as_ref()?.config;
3071 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3072
3073 #[derive(Debug, Clone)]
3074 struct StartPosition {
3075 start: Point,
3076 suffix: SharedString,
3077 }
3078
3079 // Find the suggested indentation ranges based on the syntax tree.
3080 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3081 let end = Point::new(row_range.end, 0);
3082 let range = (start..end).to_offset(&self.text);
3083 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3084 Some(&grammar.indents_config.as_ref()?.query)
3085 });
3086 let indent_configs = matches
3087 .grammars()
3088 .iter()
3089 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3090 .collect::<Vec<_>>();
3091
3092 let mut indent_ranges = Vec::<Range<Point>>::new();
3093 let mut start_positions = Vec::<StartPosition>::new();
3094 let mut outdent_positions = Vec::<Point>::new();
3095 while let Some(mat) = matches.peek() {
3096 let mut start: Option<Point> = None;
3097 let mut end: Option<Point> = None;
3098
3099 let config = indent_configs[mat.grammar_index];
3100 for capture in mat.captures {
3101 if capture.index == config.indent_capture_ix {
3102 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3103 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3104 } else if Some(capture.index) == config.start_capture_ix {
3105 start = Some(Point::from_ts_point(capture.node.end_position()));
3106 } else if Some(capture.index) == config.end_capture_ix {
3107 end = Some(Point::from_ts_point(capture.node.start_position()));
3108 } else if Some(capture.index) == config.outdent_capture_ix {
3109 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3110 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3111 start_positions.push(StartPosition {
3112 start: Point::from_ts_point(capture.node.start_position()),
3113 suffix: suffix.clone(),
3114 });
3115 }
3116 }
3117
3118 matches.advance();
3119 if let Some((start, end)) = start.zip(end) {
3120 if start.row == end.row {
3121 continue;
3122 }
3123 let range = start..end;
3124 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3125 Err(ix) => indent_ranges.insert(ix, range),
3126 Ok(ix) => {
3127 let prev_range = &mut indent_ranges[ix];
3128 prev_range.end = prev_range.end.max(range.end);
3129 }
3130 }
3131 }
3132 }
3133
3134 let mut error_ranges = Vec::<Range<Point>>::new();
3135 let mut matches = self
3136 .syntax
3137 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3138 while let Some(mat) = matches.peek() {
3139 let node = mat.captures[0].node;
3140 let start = Point::from_ts_point(node.start_position());
3141 let end = Point::from_ts_point(node.end_position());
3142 let range = start..end;
3143 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3144 Ok(ix) | Err(ix) => ix,
3145 };
3146 let mut end_ix = ix;
3147 while let Some(existing_range) = error_ranges.get(end_ix) {
3148 if existing_range.end < end {
3149 end_ix += 1;
3150 } else {
3151 break;
3152 }
3153 }
3154 error_ranges.splice(ix..end_ix, [range]);
3155 matches.advance();
3156 }
3157
3158 outdent_positions.sort();
3159 for outdent_position in outdent_positions {
3160 // find the innermost indent range containing this outdent_position
3161 // set its end to the outdent position
3162 if let Some(range_to_truncate) = indent_ranges
3163 .iter_mut()
3164 .filter(|indent_range| indent_range.contains(&outdent_position))
3165 .next_back()
3166 {
3167 range_to_truncate.end = outdent_position;
3168 }
3169 }
3170
3171 start_positions.sort_by_key(|b| b.start);
3172
        // Find the suggested indentation increases and decreases based on regexes.
3174 let mut regex_outdent_map = HashMap::default();
3175 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3176 let mut start_positions_iter = start_positions.iter().peekable();
3177
3178 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3179 self.for_each_line(
3180 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3181 ..Point::new(row_range.end, 0),
3182 |row, line| {
3183 if config
3184 .decrease_indent_pattern
3185 .as_ref()
3186 .is_some_and(|regex| regex.is_match(line))
3187 {
3188 indent_change_rows.push((row, Ordering::Less));
3189 }
3190 if config
3191 .increase_indent_pattern
3192 .as_ref()
3193 .is_some_and(|regex| regex.is_match(line))
3194 {
3195 indent_change_rows.push((row + 1, Ordering::Greater));
3196 }
3197 while let Some(pos) = start_positions_iter.peek() {
3198 if pos.start.row < row {
3199 let pos = start_positions_iter.next().unwrap();
3200 last_seen_suffix
3201 .entry(pos.suffix.to_string())
3202 .or_default()
3203 .push(pos.start);
3204 } else {
3205 break;
3206 }
3207 }
3208 for rule in &config.decrease_indent_patterns {
3209 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3210 let row_start_column = self.indent_size_for_line(row).len;
3211 let basis_row = rule
3212 .valid_after
3213 .iter()
3214 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3215 .flatten()
3216 .filter(|start_point| start_point.column <= row_start_column)
3217 .max_by_key(|start_point| start_point.row);
3218 if let Some(outdent_to_row) = basis_row {
3219 regex_outdent_map.insert(row, outdent_to_row.row);
3220 }
3221 break;
3222 }
3223 }
3224 },
3225 );
3226
3227 let mut indent_changes = indent_change_rows.into_iter().peekable();
3228 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3229 prev_non_blank_row.unwrap_or(0)
3230 } else {
3231 row_range.start.saturating_sub(1)
3232 };
3233
3234 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3235 Some(row_range.map(move |row| {
3236 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3237
3238 let mut indent_from_prev_row = false;
3239 let mut outdent_from_prev_row = false;
3240 let mut outdent_to_row = u32::MAX;
3241 let mut from_regex = false;
3242
3243 while let Some((indent_row, delta)) = indent_changes.peek() {
3244 match indent_row.cmp(&row) {
3245 Ordering::Equal => match delta {
3246 Ordering::Less => {
3247 from_regex = true;
3248 outdent_from_prev_row = true
3249 }
3250 Ordering::Greater => {
3251 indent_from_prev_row = true;
3252 from_regex = true
3253 }
3254 _ => {}
3255 },
3256
3257 Ordering::Greater => break,
3258 Ordering::Less => {}
3259 }
3260
3261 indent_changes.next();
3262 }
3263
3264 for range in &indent_ranges {
3265 if range.start.row >= row {
3266 break;
3267 }
3268 if range.start.row == prev_row && range.end > row_start {
3269 indent_from_prev_row = true;
3270 }
3271 if range.end > prev_row_start && range.end <= row_start {
3272 outdent_to_row = outdent_to_row.min(range.start.row);
3273 }
3274 }
3275
3276 if let Some(basis_row) = regex_outdent_map.get(&row) {
3277 indent_from_prev_row = false;
3278 outdent_to_row = *basis_row;
3279 from_regex = true;
3280 }
3281
3282 let within_error = error_ranges
3283 .iter()
3284 .any(|e| e.start.row < row && e.end > row_start);
3285
3286 let suggestion = if outdent_to_row == prev_row
3287 || (outdent_from_prev_row && indent_from_prev_row)
3288 {
3289 Some(IndentSuggestion {
3290 basis_row: prev_row,
3291 delta: Ordering::Equal,
3292 within_error: within_error && !from_regex,
3293 })
3294 } else if indent_from_prev_row {
3295 Some(IndentSuggestion {
3296 basis_row: prev_row,
3297 delta: Ordering::Greater,
3298 within_error: within_error && !from_regex,
3299 })
3300 } else if outdent_to_row < prev_row {
3301 Some(IndentSuggestion {
3302 basis_row: outdent_to_row,
3303 delta: Ordering::Equal,
3304 within_error: within_error && !from_regex,
3305 })
3306 } else if outdent_from_prev_row {
3307 Some(IndentSuggestion {
3308 basis_row: prev_row,
3309 delta: Ordering::Less,
3310 within_error: within_error && !from_regex,
3311 })
3312 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3313 {
3314 Some(IndentSuggestion {
3315 basis_row: prev_row,
3316 delta: Ordering::Equal,
3317 within_error: within_error && !from_regex,
3318 })
3319 } else {
3320 None
3321 };
3322
3323 prev_row = row;
3324 prev_row_start = row_start;
3325 suggestion
3326 }))
3327 }
3328
3329 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3330 while row > 0 {
3331 row -= 1;
3332 if !self.is_line_blank(row) {
3333 return Some(row);
3334 }
3335 }
3336 None
3337 }
3338
3339 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3340 let captures = self.syntax.captures(range, &self.text, |grammar| {
3341 grammar
3342 .highlights_config
3343 .as_ref()
3344 .map(|config| &config.query)
3345 });
3346 let highlight_maps = captures
3347 .grammars()
3348 .iter()
3349 .map(|grammar| grammar.highlight_map())
3350 .collect();
3351 (captures, highlight_maps)
3352 }
3353
3354 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3355 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3356 /// returned in chunks where each chunk has a single syntax highlighting style and
3357 /// diagnostic status.
3358 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3359 let range = range.start.to_offset(self)..range.end.to_offset(self);
3360
3361 let mut syntax = None;
3362 if language_aware {
3363 syntax = Some(self.get_highlights(range.clone()));
3364 }
3365 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3366 let diagnostics = language_aware;
3367 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3368 }
3369
3370 pub fn highlighted_text_for_range<T: ToOffset>(
3371 &self,
3372 range: Range<T>,
3373 override_style: Option<HighlightStyle>,
3374 syntax_theme: &SyntaxTheme,
3375 ) -> HighlightedText {
3376 HighlightedText::from_buffer_range(
3377 range,
3378 &self.text,
3379 &self.syntax,
3380 override_style,
3381 syntax_theme,
3382 )
3383 }
3384
3385 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3387 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3388 let mut line = String::new();
3389 let mut row = range.start.row;
3390 for chunk in self
3391 .as_rope()
3392 .chunks_in_range(range.to_offset(self))
3393 .chain(["\n"])
3394 {
3395 for (newline_ix, text) in chunk.split('\n').enumerate() {
3396 if newline_ix > 0 {
3397 callback(row, &line);
3398 row += 1;
3399 line.clear();
3400 }
3401 line.push_str(text);
3402 }
3403 }
3404 }
3405
3406 /// Iterates over every [`SyntaxLayer`] in the buffer.
3407 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3408 self.syntax_layers_for_range(0..self.len(), true)
3409 }
3410
3411 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3412 let offset = position.to_offset(self);
3413 self.syntax_layers_for_range(offset..offset, false)
3414 .filter(|l| {
3415 if let Some(ranges) = l.included_sub_ranges {
3416 ranges.iter().any(|range| {
3417 let start = range.start.to_offset(self);
3418 start <= offset && {
3419 let end = range.end.to_offset(self);
3420 offset < end
3421 }
3422 })
3423 } else {
3424 l.node().start_byte() <= offset && l.node().end_byte() > offset
3425 }
3426 })
3427 .last()
3428 }
3429
3430 pub fn syntax_layers_for_range<D: ToOffset>(
3431 &self,
3432 range: Range<D>,
3433 include_hidden: bool,
3434 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3435 self.syntax
3436 .layers_for_range(range, &self.text, include_hidden)
3437 }
3438
3439 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3440 &self,
3441 range: Range<D>,
3442 ) -> Option<SyntaxLayer<'_>> {
3443 let range = range.to_offset(self);
3444 self.syntax
3445 .layers_for_range(range, &self.text, false)
3446 .max_by(|a, b| {
3447 if a.depth != b.depth {
3448 a.depth.cmp(&b.depth)
3449 } else if a.offset.0 != b.offset.0 {
3450 a.offset.0.cmp(&b.offset.0)
3451 } else {
3452 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3453 }
3454 })
3455 }
3456
3457 /// Returns the main [`Language`].
3458 pub fn language(&self) -> Option<&Arc<Language>> {
3459 self.language.as_ref()
3460 }
3461
3462 /// Returns the [`Language`] at the given location.
3463 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3464 self.syntax_layer_at(position)
3465 .map(|info| info.language)
3466 .or(self.language.as_ref())
3467 }
3468
3469 /// Returns the settings for the language at the given location.
3470 pub fn settings_at<'a, D: ToOffset>(
3471 &'a self,
3472 position: D,
3473 cx: &'a App,
3474 ) -> Cow<'a, LanguageSettings> {
3475 language_settings(
3476 self.language_at(position).map(|l| l.name()),
3477 self.file.as_ref(),
3478 cx,
3479 )
3480 }
3481
3482 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3483 CharClassifier::new(self.language_scope_at(point))
3484 }
3485
3486 /// Returns the [`LanguageScope`] at the given location.
3487 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3488 let offset = position.to_offset(self);
3489 let mut scope = None;
3490 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3491
3492 // Use the layer that has the smallest node intersecting the given point.
3493 for layer in self
3494 .syntax
3495 .layers_for_range(offset..offset, &self.text, false)
3496 {
3497 let mut cursor = layer.node().walk();
3498
3499 let mut range = None;
3500 loop {
3501 let child_range = cursor.node().byte_range();
3502 if !child_range.contains(&offset) {
3503 break;
3504 }
3505
3506 range = Some(child_range);
3507 if cursor.goto_first_child_for_byte(offset).is_none() {
3508 break;
3509 }
3510 }
3511
3512 if let Some(range) = range
3513 && smallest_range_and_depth.as_ref().is_none_or(
3514 |(smallest_range, smallest_range_depth)| {
3515 if layer.depth > *smallest_range_depth {
3516 true
3517 } else if layer.depth == *smallest_range_depth {
3518 range.len() < smallest_range.len()
3519 } else {
3520 false
3521 }
3522 },
3523 )
3524 {
3525 smallest_range_and_depth = Some((range, layer.depth));
3526 scope = Some(LanguageScope {
3527 language: layer.language.clone(),
3528 override_id: layer.override_id(offset, &self.text),
3529 });
3530 }
3531 }
3532
3533 scope.or_else(|| {
3534 self.language.clone().map(|language| LanguageScope {
3535 language,
3536 override_id: None,
3537 })
3538 })
3539 }
3540
3541 /// Returns a tuple of the range and character kind of the word
3542 /// surrounding the given position.
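    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`
    /// and `offset` points inside a word:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(offset, None);
    /// // `kind` is `Some(CharKind::Word)` when the position touches word characters.
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```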
3543 pub fn surrounding_word<T: ToOffset>(
3544 &self,
3545 start: T,
3546 scope_context: Option<CharScopeContext>,
3547 ) -> (Range<usize>, Option<CharKind>) {
3548 let mut start = start.to_offset(self);
3549 let mut end = start;
3550 let mut next_chars = self.chars_at(start).take(128).peekable();
3551 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3552
3553 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3554 let word_kind = cmp::max(
3555 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3556 next_chars.peek().copied().map(|c| classifier.kind(c)),
3557 );
3558
3559 for ch in prev_chars {
3560 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3561 start -= ch.len_utf8();
3562 } else {
3563 break;
3564 }
3565 }
3566
3567 for ch in next_chars {
3568 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3569 end += ch.len_utf8();
3570 } else {
3571 break;
3572 }
3573 }
3574
3575 (start..end, word_kind)
3576 }
3577
3578 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3579 /// range. When `require_larger` is true, the node found must be larger than the query range.
3580 ///
3581 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3582 /// be moved to the root of the tree.
3583 fn goto_node_enclosing_range(
3584 cursor: &mut tree_sitter::TreeCursor,
3585 query_range: &Range<usize>,
3586 require_larger: bool,
3587 ) -> bool {
3588 let mut ascending = false;
3589 loop {
3590 let mut range = cursor.node().byte_range();
3591 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3594 if range.start > query_range.start {
3595 cursor.goto_previous_sibling();
3596 range = cursor.node().byte_range();
3597 }
3598 } else {
3599 // When the query range is non-empty and the current node ends exactly at the start,
3600 // move to the next sibling to find a node that extends beyond the start.
3601 if range.end == query_range.start {
3602 cursor.goto_next_sibling();
3603 range = cursor.node().byte_range();
3604 }
3605 }
3606
3607 let encloses = range.contains_inclusive(query_range)
3608 && (!require_larger || range.len() > query_range.len());
3609 if !encloses {
3610 ascending = true;
3611 if !cursor.goto_parent() {
3612 return false;
3613 }
3614 continue;
3615 } else if ascending {
3616 return true;
3617 }
3618
3619 // Descend into the current node.
3620 if cursor
3621 .goto_first_child_for_byte(query_range.start)
3622 .is_none()
3623 {
3624 return true;
3625 }
3626 }
3627 }
3628
3629 pub fn syntax_ancestor<'a, T: ToOffset>(
3630 &'a self,
3631 range: Range<T>,
3632 ) -> Option<tree_sitter::Node<'a>> {
3633 let range = range.start.to_offset(self)..range.end.to_offset(self);
3634 let mut result: Option<tree_sitter::Node<'a>> = None;
3635 for layer in self
3636 .syntax
3637 .layers_for_range(range.clone(), &self.text, true)
3638 {
3639 let mut cursor = layer.node().walk();
3640
3641 // Find the node that both contains the range and is larger than it.
3642 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3643 continue;
3644 }
3645
3646 let left_node = cursor.node();
3647 let mut layer_result = left_node;
3648
3649 // For an empty range, try to find another node immediately to the right of the range.
3650 if left_node.end_byte() == range.start {
3651 let mut right_node = None;
3652 while !cursor.goto_next_sibling() {
3653 if !cursor.goto_parent() {
3654 break;
3655 }
3656 }
3657
3658 while cursor.node().start_byte() == range.start {
3659 right_node = Some(cursor.node());
3660 if !cursor.goto_first_child() {
3661 break;
3662 }
3663 }
3664
3665 // If there is a candidate node on both sides of the (empty) range, then
3666 // decide between the two by favoring a named node over an anonymous token.
3667 // If both nodes are the same in that regard, favor the right one.
3668 if let Some(right_node) = right_node
3669 && (right_node.is_named() || !left_node.is_named())
3670 {
3671 layer_result = right_node;
3672 }
3673 }
3674
3675 if let Some(previous_result) = &result
3676 && previous_result.byte_range().len() < layer_result.byte_range().len()
3677 {
3678 continue;
3679 }
3680 result = Some(layer_result);
3681 }
3682
3683 result
3684 }
3685
3686 /// Find the previous sibling syntax node at the given range.
3687 ///
3688 /// This function locates the syntax node that precedes the node containing
3689 /// the given range. It searches hierarchically by:
3690 /// 1. Finding the node that contains the given range
3691 /// 2. Looking for the previous sibling at the same tree level
3692 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3693 ///
3694 /// Returns `None` if there is no previous sibling at any ancestor level.
3695 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3696 &'a self,
3697 range: Range<T>,
3698 ) -> Option<tree_sitter::Node<'a>> {
3699 let range = range.start.to_offset(self)..range.end.to_offset(self);
3700 let mut result: Option<tree_sitter::Node<'a>> = None;
3701
3702 for layer in self
3703 .syntax
3704 .layers_for_range(range.clone(), &self.text, true)
3705 {
3706 let mut cursor = layer.node().walk();
3707
3708 // Find the node that contains the range
3709 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3710 continue;
3711 }
3712
3713 // Look for the previous sibling, moving up ancestor levels if needed
3714 loop {
3715 if cursor.goto_previous_sibling() {
3716 let layer_result = cursor.node();
3717
3718 if let Some(previous_result) = &result {
3719 if previous_result.byte_range().end < layer_result.byte_range().end {
3720 continue;
3721 }
3722 }
3723 result = Some(layer_result);
3724 break;
3725 }
3726
3727 // No sibling found at this level, try moving up to parent
3728 if !cursor.goto_parent() {
3729 break;
3730 }
3731 }
3732 }
3733
3734 result
3735 }
3736
3737 /// Find the next sibling syntax node at the given range.
3738 ///
3739 /// This function locates the syntax node that follows the node containing
3740 /// the given range. It searches hierarchically by:
3741 /// 1. Finding the node that contains the given range
3742 /// 2. Looking for the next sibling at the same tree level
3743 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3744 ///
3745 /// Returns `None` if there is no next sibling at any ancestor level.
3746 pub fn syntax_next_sibling<'a, T: ToOffset>(
3747 &'a self,
3748 range: Range<T>,
3749 ) -> Option<tree_sitter::Node<'a>> {
3750 let range = range.start.to_offset(self)..range.end.to_offset(self);
3751 let mut result: Option<tree_sitter::Node<'a>> = None;
3752
3753 for layer in self
3754 .syntax
3755 .layers_for_range(range.clone(), &self.text, true)
3756 {
3757 let mut cursor = layer.node().walk();
3758
3759 // Find the node that contains the range
3760 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3761 continue;
3762 }
3763
3764 // Look for the next sibling, moving up ancestor levels if needed
3765 loop {
3766 if cursor.goto_next_sibling() {
3767 let layer_result = cursor.node();
3768
3769 if let Some(previous_result) = &result {
3770 if previous_result.byte_range().start > layer_result.byte_range().start {
3771 continue;
3772 }
3773 }
3774 result = Some(layer_result);
3775 break;
3776 }
3777
3778 // No sibling found at this level, try moving up to parent
3779 if !cursor.goto_parent() {
3780 break;
3781 }
3782 }
3783 }
3784
3785 result
3786 }
3787
    /// Returns the root syntax node within the row containing the given position.
3789 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3790 let start_offset = position.to_offset(self);
3791
3792 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3793
3794 let layer = self
3795 .syntax
3796 .layers_for_range(start_offset..start_offset, &self.text, true)
3797 .next()?;
3798
3799 let mut cursor = layer.node().walk();
3800
3801 // Descend to the first leaf that touches the start of the range.
3802 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3803 if cursor.node().end_byte() == start_offset {
3804 cursor.goto_next_sibling();
3805 }
3806 }
3807
3808 // Ascend to the root node within the same row.
3809 while cursor.goto_parent() {
3810 if cursor.node().start_position().row != row {
3811 break;
3812 }
3813 }
3814
3815 Some(cursor.node())
3816 }
3817
3818 /// Returns the outline for the buffer.
3819 ///
3820 /// This method allows passing an optional [`SyntaxTheme`] to
3821 /// syntax-highlight the returned symbols.
3822 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3823 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3824 }
3825
3826 /// Returns all the symbols that contain the given position.
3827 ///
3828 /// This method allows passing an optional [`SyntaxTheme`] to
3829 /// syntax-highlight the returned symbols.
3830 pub fn symbols_containing<T: ToOffset>(
3831 &self,
3832 position: T,
3833 theme: Option<&SyntaxTheme>,
3834 ) -> Vec<OutlineItem<Anchor>> {
3835 let position = position.to_offset(self);
3836 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3837 let end = self.clip_offset(position + 1, Bias::Right);
3838 let mut items = self.outline_items_containing(start..end, false, theme);
3839 let mut prev_depth = None;
3840 items.retain(|item| {
3841 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3842 prev_depth = Some(item.depth);
3843 result
3844 });
3845 items
3846 }
3847
3848 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3849 let range = range.to_offset(self);
3850 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3851 grammar.outline_config.as_ref().map(|c| &c.query)
3852 });
3853 let configs = matches
3854 .grammars()
3855 .iter()
3856 .map(|g| g.outline_config.as_ref().unwrap())
3857 .collect::<Vec<_>>();
3858
3859 while let Some(mat) = matches.peek() {
3860 let config = &configs[mat.grammar_index];
3861 let containing_item_node = maybe!({
3862 let item_node = mat.captures.iter().find_map(|cap| {
3863 if cap.index == config.item_capture_ix {
3864 Some(cap.node)
3865 } else {
3866 None
3867 }
3868 })?;
3869
3870 let item_byte_range = item_node.byte_range();
3871 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3872 None
3873 } else {
3874 Some(item_node)
3875 }
3876 });
3877
3878 if let Some(item_node) = containing_item_node {
3879 return Some(
3880 Point::from_ts_point(item_node.start_position())
3881 ..Point::from_ts_point(item_node.end_position()),
3882 );
3883 }
3884
3885 matches.advance();
3886 }
3887 None
3888 }
3889
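    /// Returns the outline items whose ranges intersect the given range, with `depth` assigned
    /// from their containment relationships.
    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let items = snapshot.outline_items_containing(0..snapshot.len(), true, None);
    /// for item in &items {
    ///     // `item.depth` reflects nesting, and `item.text` is the rendered label.
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```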
3890 pub fn outline_items_containing<T: ToOffset>(
3891 &self,
3892 range: Range<T>,
3893 include_extra_context: bool,
3894 theme: Option<&SyntaxTheme>,
3895 ) -> Vec<OutlineItem<Anchor>> {
3896 self.outline_items_containing_internal(
3897 range,
3898 include_extra_context,
3899 theme,
3900 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3901 )
3902 }
3903
3904 pub fn outline_items_as_points_containing<T: ToOffset>(
3905 &self,
3906 range: Range<T>,
3907 include_extra_context: bool,
3908 theme: Option<&SyntaxTheme>,
3909 ) -> Vec<OutlineItem<Point>> {
3910 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3911 range
3912 })
3913 }
3914
3915 fn outline_items_containing_internal<T: ToOffset, U>(
3916 &self,
3917 range: Range<T>,
3918 include_extra_context: bool,
3919 theme: Option<&SyntaxTheme>,
3920 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3921 ) -> Vec<OutlineItem<U>> {
3922 let range = range.to_offset(self);
3923 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3924 grammar.outline_config.as_ref().map(|c| &c.query)
3925 });
3926
3927 let mut items = Vec::new();
3928 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3929 while let Some(mat) = matches.peek() {
3930 let config = matches.grammars()[mat.grammar_index]
3931 .outline_config
3932 .as_ref()
3933 .unwrap();
3934 if let Some(item) =
3935 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3936 {
3937 items.push(item);
3938 } else if let Some(capture) = mat
3939 .captures
3940 .iter()
3941 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3942 {
3943 let capture_range = capture.node.start_position()..capture.node.end_position();
3944 let mut capture_row_range =
3945 capture_range.start.row as u32..capture_range.end.row as u32;
3946 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3947 {
3948 capture_row_range.end -= 1;
3949 }
3950 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3951 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3952 last_row_range.end = capture_row_range.end;
3953 } else {
3954 annotation_row_ranges.push(capture_row_range);
3955 }
3956 } else {
3957 annotation_row_ranges.push(capture_row_range);
3958 }
3959 }
3960 matches.advance();
3961 }
3962
3963 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3964
3965 // Assign depths based on containment relationships and convert to anchors.
3966 let mut item_ends_stack = Vec::<Point>::new();
3967 let mut anchor_items = Vec::new();
3968 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3969 for item in items {
3970 while let Some(last_end) = item_ends_stack.last().copied() {
3971 if last_end < item.range.end {
3972 item_ends_stack.pop();
3973 } else {
3974 break;
3975 }
3976 }
3977
3978 let mut annotation_row_range = None;
3979 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3980 let row_preceding_item = item.range.start.row.saturating_sub(1);
3981 if next_annotation_row_range.end < row_preceding_item {
3982 annotation_row_ranges.next();
3983 } else {
3984 if next_annotation_row_range.end == row_preceding_item {
3985 annotation_row_range = Some(next_annotation_row_range.clone());
3986 annotation_row_ranges.next();
3987 }
3988 break;
3989 }
3990 }
3991
3992 anchor_items.push(OutlineItem {
3993 depth: item_ends_stack.len(),
3994 range: range_callback(self, item.range.clone()),
3995 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
3996 text: item.text,
3997 highlight_ranges: item.highlight_ranges,
3998 name_ranges: item.name_ranges,
3999 body_range: item.body_range.map(|r| range_callback(self, r)),
4000 annotation_range: annotation_row_range.map(|annotation_range| {
4001 let point_range = Point::new(annotation_range.start, 0)
4002 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4003 range_callback(self, point_range)
4004 }),
4005 });
4006 item_ends_stack.push(item.range.end);
4007 }
4008
4009 anchor_items
4010 }
4011
4012 fn next_outline_item(
4013 &self,
4014 config: &OutlineConfig,
4015 mat: &SyntaxMapMatch,
4016 range: &Range<usize>,
4017 include_extra_context: bool,
4018 theme: Option<&SyntaxTheme>,
4019 ) -> Option<OutlineItem<Point>> {
4020 let item_node = mat.captures.iter().find_map(|cap| {
4021 if cap.index == config.item_capture_ix {
4022 Some(cap.node)
4023 } else {
4024 None
4025 }
4026 })?;
4027
4028 let item_byte_range = item_node.byte_range();
4029 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4030 return None;
4031 }
4032 let item_point_range = Point::from_ts_point(item_node.start_position())
4033 ..Point::from_ts_point(item_node.end_position());
4034
4035 let mut open_point = None;
4036 let mut close_point = None;
4037
4038 let mut buffer_ranges = Vec::new();
4039 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4040 let mut range = node.start_byte()..node.end_byte();
4041 let start = node.start_position();
4042 if node.end_position().row > start.row {
4043 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4044 }
4045
4046 if !range.is_empty() {
4047 buffer_ranges.push((range, node_is_name));
4048 }
4049 };
4050
4051 for capture in mat.captures {
4052 if capture.index == config.name_capture_ix {
4053 add_to_buffer_ranges(capture.node, true);
4054 } else if Some(capture.index) == config.context_capture_ix
4055 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4056 {
4057 add_to_buffer_ranges(capture.node, false);
4058 } else {
4059 if Some(capture.index) == config.open_capture_ix {
4060 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4061 } else if Some(capture.index) == config.close_capture_ix {
4062 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4063 }
4064 }
4065 }
4066
4067 if buffer_ranges.is_empty() {
4068 return None;
4069 }
4070 let source_range_for_text =
4071 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4072
4073 let mut text = String::new();
4074 let mut highlight_ranges = Vec::new();
4075 let mut name_ranges = Vec::new();
4076 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4077 let mut last_buffer_range_end = 0;
4078 for (buffer_range, is_name) in buffer_ranges {
4079 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4080 if space_added {
4081 text.push(' ');
4082 }
4083 let before_append_len = text.len();
4084 let mut offset = buffer_range.start;
4085 chunks.seek(buffer_range.clone());
4086 for mut chunk in chunks.by_ref() {
4087 if chunk.text.len() > buffer_range.end - offset {
4088 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4089 offset = buffer_range.end;
4090 } else {
4091 offset += chunk.text.len();
4092 }
4093 let style = chunk
4094 .syntax_highlight_id
4095 .zip(theme)
4096 .and_then(|(highlight, theme)| highlight.style(theme));
4097 if let Some(style) = style {
4098 let start = text.len();
4099 let end = start + chunk.text.len();
4100 highlight_ranges.push((start..end, style));
4101 }
4102 text.push_str(chunk.text);
4103 if offset >= buffer_range.end {
4104 break;
4105 }
4106 }
4107 if is_name {
4108 let after_append_len = text.len();
4109 let start = if space_added && !name_ranges.is_empty() {
4110 before_append_len - 1
4111 } else {
4112 before_append_len
4113 };
4114 name_ranges.push(start..after_append_len);
4115 }
4116 last_buffer_range_end = buffer_range.end;
4117 }
4118
4119 Some(OutlineItem {
4120 depth: 0, // We'll calculate the depth later
4121 range: item_point_range,
4122 source_range_for_text: source_range_for_text.to_point(self),
4123 text,
4124 highlight_ranges,
4125 name_ranges,
4126 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4127 annotation_range: None,
4128 })
4129 }
4130
4131 pub fn function_body_fold_ranges<T: ToOffset>(
4132 &self,
4133 within: Range<T>,
4134 ) -> impl Iterator<Item = Range<usize>> + '_ {
4135 self.text_object_ranges(within, TreeSitterOptions::default())
4136 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4137 }
4138
4139 /// For each grammar in the language, runs the provided
4140 /// [`tree_sitter::Query`] against the given range.
4141 pub fn matches(
4142 &self,
4143 range: Range<usize>,
4144 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4145 ) -> SyntaxMapMatches<'_> {
4146 self.syntax.matches(range, self, query)
4147 }
4148
    /// Finds all [`RowChunks`] applicable to the given range, then returns every bracket pair
    /// that intersects those chunks. As a result, it may return bracket pairs lying outside the
    /// given range.
    ///
    /// Chunks listed in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
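    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let brackets = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
    /// for (row_range, matches) in brackets {
    ///     // `matches` holds every `BracketMatch` intersecting the chunk that covers `row_range`.
    /// }
    /// ```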
4154 pub fn fetch_bracket_ranges(
4155 &self,
4156 range: Range<usize>,
4157 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4158 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch>> {
4159 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4160
4161 let known_chunks = match known_chunks {
4162 Some((known_version, known_chunks)) => {
4163 if !tree_sitter_data
4164 .chunks
4165 .version()
4166 .changed_since(known_version)
4167 {
4168 known_chunks.clone()
4169 } else {
4170 HashSet::default()
4171 }
4172 }
4173 None => HashSet::default(),
4174 };
4175
4176 let mut new_bracket_matches = HashMap::default();
4177 let mut all_bracket_matches = HashMap::default();
4178
4179 for chunk in tree_sitter_data
4180 .chunks
4181 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4182 {
4183 if known_chunks.contains(&chunk.row_range()) {
4184 continue;
4185 }
4186 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4187 continue;
4188 };
4189 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4190
4191 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4192 Some(cached_brackets) => cached_brackets,
4193 None => {
4194 let mut matches =
4195 self.syntax
4196 .matches(chunk_range.clone(), &self.text, |grammar| {
4197 grammar.brackets_config.as_ref().map(|c| &c.query)
4198 });
4199 let configs = matches
4200 .grammars()
4201 .iter()
4202 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4203 .collect::<Vec<_>>();
4204
4205 // todo! this seems like a wrong parameter: instead, use chunk range, `Range<BufferRow>`, as a key part + add bracket_id that will be used for each bracket
4206 let mut depth = 0;
4207 let chunk_range = chunk_range.clone();
4208 let new_matches = iter::from_fn(move || {
4209 while let Some(mat) = matches.peek() {
4210 let mut open = None;
4211 let mut close = None;
4212 let config = configs[mat.grammar_index];
4213 let pattern = &config.patterns[mat.pattern_index];
4214 for capture in mat.captures {
4215 if capture.index == config.open_capture_ix {
4216 open = Some(capture.node.byte_range());
4217 } else if capture.index == config.close_capture_ix {
4218 close = Some(capture.node.byte_range());
4219 }
4220 }
4221
4222 matches.advance();
4223
4224 let Some((open_range, close_range)) = open.zip(close) else {
4225 continue;
4226 };
4227
4228 let bracket_range = open_range.start..=close_range.end;
4229 if !bracket_range.overlaps(&chunk_range) {
4230 continue;
4231 }
4232
4233 depth += 1;
4234
4235 return Some(BracketMatch {
4236 open_range,
4237 close_range,
4238 newline_only: pattern.newline_only,
4239 depth,
4240 });
4241 }
4242 None
4243 })
4244 .collect::<Vec<_>>();
4245
4246 new_bracket_matches.insert(chunk.id, new_matches.clone());
4247 new_matches
4248 }
4249 };
4250 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4251 }
4252
4253 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4254 if latest_tree_sitter_data.chunks.version() == &self.version {
4255 for (chunk_id, new_matches) in new_bracket_matches {
4256 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4257 if old_chunks.is_none() {
4258 *old_chunks = Some(new_matches);
4259 }
4260 }
4261 }
4262
4263 all_bracket_matches
4264 }
4265
4266 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4267 let mut tree_sitter_data = self.tree_sitter_data.lock();
4268 if self
4269 .version
4270 .changed_since(tree_sitter_data.chunks.version())
4271 {
4272 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4273 }
4274 tree_sitter_data
4275 }
4276
4277 pub fn all_bracket_ranges(&self, range: Range<usize>) -> impl Iterator<Item = BracketMatch> {
4278 self.fetch_bracket_ranges(range.clone(), None)
4279 .into_values()
4280 .flatten()
4281 .filter(move |bracket_match| {
4282 let bracket_range = bracket_match.open_range.start..=bracket_match.close_range.end;
4283 bracket_range.overlaps(&range)
4284 })
4285 }
4286
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4288 pub fn bracket_ranges<T: ToOffset>(
4289 &self,
4290 range: Range<T>,
4291 ) -> impl Iterator<Item = BracketMatch> + '_ {
4292 // Find bracket pairs that *inclusively* contain the given range.
4293 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4294 self.all_bracket_ranges(range)
4295 .filter(|pair| !pair.newline_only)
4296 }
4297
4298 pub fn debug_variables_query<T: ToOffset>(
4299 &self,
4300 range: Range<T>,
4301 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4302 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4303
4304 let mut matches = self.syntax.matches_with_options(
4305 range.clone(),
4306 &self.text,
4307 TreeSitterOptions::default(),
4308 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4309 );
4310
4311 let configs = matches
4312 .grammars()
4313 .iter()
4314 .map(|grammar| grammar.debug_variables_config.as_ref())
4315 .collect::<Vec<_>>();
4316
4317 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4318
4319 iter::from_fn(move || {
4320 loop {
4321 while let Some(capture) = captures.pop() {
4322 if capture.0.overlaps(&range) {
4323 return Some(capture);
4324 }
4325 }
4326
4327 let mat = matches.peek()?;
4328
4329 let Some(config) = configs[mat.grammar_index].as_ref() else {
4330 matches.advance();
4331 continue;
4332 };
4333
4334 for capture in mat.captures {
4335 let Some(ix) = config
4336 .objects_by_capture_ix
4337 .binary_search_by_key(&capture.index, |e| e.0)
4338 .ok()
4339 else {
4340 continue;
4341 };
4342 let text_object = config.objects_by_capture_ix[ix].1;
4343 let byte_range = capture.node.byte_range();
4344
4345 let mut found = false;
4346 for (range, existing) in captures.iter_mut() {
4347 if existing == &text_object {
4348 range.start = range.start.min(byte_range.start);
4349 range.end = range.end.max(byte_range.end);
4350 found = true;
4351 break;
4352 }
4353 }
4354
4355 if !found {
4356 captures.push((byte_range, text_object));
4357 }
4358 }
4359
4360 matches.advance();
4361 }
4362 })
4363 }
4364
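    /// Returns the text object ranges (as defined by each grammar's text object query) that
    /// overlap the given range.
    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`;
    /// this mirrors how `function_body_fold_ranges` uses it above:
    ///
    /// ```ignore
    /// let function_bodies: Vec<_> = snapshot
    ///     .text_object_ranges(0..snapshot.len(), TreeSitterOptions::default())
    ///     .filter(|(_, obj)| *obj == TextObject::InsideFunction)
    ///     .map(|(range, _)| range)
    ///     .collect();
    /// ```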
4365 pub fn text_object_ranges<T: ToOffset>(
4366 &self,
4367 range: Range<T>,
4368 options: TreeSitterOptions,
4369 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4370 let range =
4371 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4372
4373 let mut matches =
4374 self.syntax
4375 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4376 grammar.text_object_config.as_ref().map(|c| &c.query)
4377 });
4378
4379 let configs = matches
4380 .grammars()
4381 .iter()
4382 .map(|grammar| grammar.text_object_config.as_ref())
4383 .collect::<Vec<_>>();
4384
4385 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4386
4387 iter::from_fn(move || {
4388 loop {
4389 while let Some(capture) = captures.pop() {
4390 if capture.0.overlaps(&range) {
4391 return Some(capture);
4392 }
4393 }
4394
4395 let mat = matches.peek()?;
4396
4397 let Some(config) = configs[mat.grammar_index].as_ref() else {
4398 matches.advance();
4399 continue;
4400 };
4401
4402 for capture in mat.captures {
4403 let Some(ix) = config
4404 .text_objects_by_capture_ix
4405 .binary_search_by_key(&capture.index, |e| e.0)
4406 .ok()
4407 else {
4408 continue;
4409 };
4410 let text_object = config.text_objects_by_capture_ix[ix].1;
4411 let byte_range = capture.node.byte_range();
4412
4413 let mut found = false;
4414 for (range, existing) in captures.iter_mut() {
4415 if existing == &text_object {
4416 range.start = range.start.min(byte_range.start);
4417 range.end = range.end.max(byte_range.end);
4418 found = true;
4419 break;
4420 }
4421 }
4422
4423 if !found {
4424 captures.push((byte_range, text_object));
4425 }
4426 }
4427
4428 matches.advance();
4429 }
4430 })
4431 }
4432
    /// Returns enclosing bracket ranges containing the given range.
4434 pub fn enclosing_bracket_ranges<T: ToOffset>(
4435 &self,
4436 range: Range<T>,
4437 ) -> impl Iterator<Item = BracketMatch> + '_ {
4438 let range = range.start.to_offset(self)..range.end.to_offset(self);
4439
4440 self.bracket_ranges(range.clone()).filter(move |pair| {
4441 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4442 })
4443 }
4444
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no
    /// brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
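    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`
    /// and `offset` is a cursor position:
    ///
    /// ```ignore
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(offset..offset, None) {
    ///     // `open` and `close` are the byte ranges of the opening and closing brackets.
    /// }
    /// ```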
4448 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4449 &self,
4450 range: Range<T>,
4451 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4452 ) -> Option<(Range<usize>, Range<usize>)> {
4453 let range = range.start.to_offset(self)..range.end.to_offset(self);
4454
4455 // Get the ranges of the innermost pair of brackets.
4456 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4457
4458 for pair in self.enclosing_bracket_ranges(range) {
4459 if let Some(range_filter) = range_filter
4460 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4461 {
4462 continue;
4463 }
4464
4465 let len = pair.close_range.end - pair.open_range.start;
4466
4467 if let Some((existing_open, existing_close)) = &result {
4468 let existing_len = existing_close.end - existing_open.start;
4469 if len > existing_len {
4470 continue;
4471 }
4472 }
4473
4474 result = Some((pair.open_range, pair.close_range));
4475 }
4476
4477 result
4478 }
4479
4480 /// Returns anchor ranges for any matches of the redaction query.
4481 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4482 /// will be run on the relevant section of the buffer.
4483 pub fn redacted_ranges<T: ToOffset>(
4484 &self,
4485 range: Range<T>,
4486 ) -> impl Iterator<Item = Range<usize>> + '_ {
4487 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4488 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4489 grammar
4490 .redactions_config
4491 .as_ref()
4492 .map(|config| &config.query)
4493 });
4494
4495 let configs = syntax_matches
4496 .grammars()
4497 .iter()
4498 .map(|grammar| grammar.redactions_config.as_ref())
4499 .collect::<Vec<_>>();
4500
4501 iter::from_fn(move || {
4502 let redacted_range = syntax_matches
4503 .peek()
4504 .and_then(|mat| {
4505 configs[mat.grammar_index].and_then(|config| {
4506 mat.captures
4507 .iter()
4508 .find(|capture| capture.index == config.redaction_capture_ix)
4509 })
4510 })
4511 .map(|mat| mat.node.byte_range());
4512 syntax_matches.advance();
4513 redacted_range
4514 })
4515 }
4516
4517 pub fn injections_intersecting_range<T: ToOffset>(
4518 &self,
4519 range: Range<T>,
4520 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4521 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4522
4523 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4524 grammar
4525 .injection_config
4526 .as_ref()
4527 .map(|config| &config.query)
4528 });
4529
4530 let configs = syntax_matches
4531 .grammars()
4532 .iter()
4533 .map(|grammar| grammar.injection_config.as_ref())
4534 .collect::<Vec<_>>();
4535
4536 iter::from_fn(move || {
4537 let ranges = syntax_matches.peek().and_then(|mat| {
4538 let config = &configs[mat.grammar_index]?;
4539 let content_capture_range = mat.captures.iter().find_map(|capture| {
4540 if capture.index == config.content_capture_ix {
4541 Some(capture.node.byte_range())
4542 } else {
4543 None
4544 }
4545 })?;
4546 let language = self.language_at(content_capture_range.start)?;
4547 Some((content_capture_range, language))
4548 });
4549 syntax_matches.advance();
4550 ranges
4551 })
4552 }
4553
4554 pub fn runnable_ranges(
4555 &self,
4556 offset_range: Range<usize>,
4557 ) -> impl Iterator<Item = RunnableRange> + '_ {
4558 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4559 grammar.runnable_config.as_ref().map(|config| &config.query)
4560 });
4561
4562 let test_configs = syntax_matches
4563 .grammars()
4564 .iter()
4565 .map(|grammar| grammar.runnable_config.as_ref())
4566 .collect::<Vec<_>>();
4567
4568 iter::from_fn(move || {
4569 loop {
4570 let mat = syntax_matches.peek()?;
4571
4572 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4573 let mut run_range = None;
4574 let full_range = mat.captures.iter().fold(
4575 Range {
4576 start: usize::MAX,
4577 end: 0,
4578 },
4579 |mut acc, next| {
4580 let byte_range = next.node.byte_range();
4581 if acc.start > byte_range.start {
4582 acc.start = byte_range.start;
4583 }
4584 if acc.end < byte_range.end {
4585 acc.end = byte_range.end;
4586 }
4587 acc
4588 },
4589 );
4590 if full_range.start > full_range.end {
4591 // We did not find a full spanning range of this match.
4592 return None;
4593 }
4594 let extra_captures: SmallVec<[_; 1]> =
4595 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4596 test_configs
4597 .extra_captures
4598 .get(capture.index as usize)
4599 .cloned()
4600 .and_then(|tag_name| match tag_name {
4601 RunnableCapture::Named(name) => {
4602 Some((capture.node.byte_range(), name))
4603 }
4604 RunnableCapture::Run => {
4605 let _ = run_range.insert(capture.node.byte_range());
4606 None
4607 }
4608 })
4609 }));
4610 let run_range = run_range?;
4611 let tags = test_configs
4612 .query
4613 .property_settings(mat.pattern_index)
4614 .iter()
4615 .filter_map(|property| {
4616 if *property.key == *"tag" {
4617 property
4618 .value
4619 .as_ref()
4620 .map(|value| RunnableTag(value.to_string().into()))
4621 } else {
4622 None
4623 }
4624 })
4625 .collect();
4626 let extra_captures = extra_captures
4627 .into_iter()
4628 .map(|(range, name)| {
4629 (
4630 name.to_string(),
4631 self.text_for_range(range).collect::<String>(),
4632 )
4633 })
4634 .collect();
4635 // All tags should have the same range.
4636 Some(RunnableRange {
4637 run_range,
4638 full_range,
4639 runnable: Runnable {
4640 tags,
4641 language: mat.language,
4642 buffer: self.remote_id(),
4643 },
4644 extra_captures,
4645 buffer_id: self.remote_id(),
4646 })
4647 });
4648
4649 syntax_matches.advance();
4650 if test_range.is_some() {
4651 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
4652 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
4653 return test_range;
4654 }
4655 }
4656 })
4657 }
4658
4659 /// Returns selections for remote peers intersecting the given range.
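    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`
    /// and `start`/`end` are `Anchor`s spanning the region of interest:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(start..end, false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `Selection<Anchor>` belonging to `replica_id`.
    ///     }
    /// }
    /// ```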
4660 #[allow(clippy::type_complexity)]
4661 pub fn selections_in_range(
4662 &self,
4663 range: Range<Anchor>,
4664 include_local: bool,
4665 ) -> impl Iterator<
4666 Item = (
4667 ReplicaId,
4668 bool,
4669 CursorShape,
4670 impl Iterator<Item = &Selection<Anchor>> + '_,
4671 ),
4672 > + '_ {
4673 self.remote_selections
4674 .iter()
4675 .filter(move |(replica_id, set)| {
4676 (include_local || **replica_id != self.text.replica_id())
4677 && !set.selections.is_empty()
4678 })
4679 .map(move |(replica_id, set)| {
4680 let start_ix = match set.selections.binary_search_by(|probe| {
4681 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4682 }) {
4683 Ok(ix) | Err(ix) => ix,
4684 };
4685 let end_ix = match set.selections.binary_search_by(|probe| {
4686 probe.start.cmp(&range.end, self).then(Ordering::Less)
4687 }) {
4688 Ok(ix) | Err(ix) => ix,
4689 };
4690
4691 (
4692 *replica_id,
4693 set.line_mode,
4694 set.cursor_shape,
4695 set.selections[start_ix..end_ix].iter(),
4696 )
4697 })
4698 }
4699
    /// Returns whether the buffer contains any diagnostics.
4701 pub fn has_diagnostics(&self) -> bool {
4702 !self.diagnostics.is_empty()
4703 }
4704
4705 /// Returns all the diagnostics intersecting the given range.
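    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`;
    /// entries come back ordered by start position, then severity, then group id:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     // `entry.range` is resolved to byte offsets here; `entry.diagnostic` holds the details.
    /// }
    /// ```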
4706 pub fn diagnostics_in_range<'a, T, O>(
4707 &'a self,
4708 search_range: Range<T>,
4709 reversed: bool,
4710 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4711 where
4712 T: 'a + Clone + ToOffset,
4713 O: 'a + FromAnchor,
4714 {
4715 let mut iterators: Vec<_> = self
4716 .diagnostics
4717 .iter()
4718 .map(|(_, collection)| {
4719 collection
4720 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4721 .peekable()
4722 })
4723 .collect();
4724
4725 std::iter::from_fn(move || {
4726 let (next_ix, _) = iterators
4727 .iter_mut()
4728 .enumerate()
4729 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4730 .min_by(|(_, a), (_, b)| {
4731 let cmp = a
4732 .range
4733 .start
4734 .cmp(&b.range.start, self)
4735 // when range is equal, sort by diagnostic severity
4736 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4737 // and stabilize order with group_id
4738 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4739 if reversed { cmp.reverse() } else { cmp }
4740 })?;
4741 iterators[next_ix]
4742 .next()
4743 .map(
4744 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4745 diagnostic,
4746 range: FromAnchor::from_anchor(&range.start, self)
4747 ..FromAnchor::from_anchor(&range.end, self),
4748 },
4749 )
4750 })
4751 }
4752
4753 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4754 /// should be used instead.
4755 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4756 &self.diagnostics
4757 }
4758
4759 /// Returns all the diagnostic groups associated with the given
4760 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4762 pub fn diagnostic_groups(
4763 &self,
4764 language_server_id: Option<LanguageServerId>,
4765 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4766 let mut groups = Vec::new();
4767
4768 if let Some(language_server_id) = language_server_id {
4769 if let Ok(ix) = self
4770 .diagnostics
4771 .binary_search_by_key(&language_server_id, |e| e.0)
4772 {
4773 self.diagnostics[ix]
4774 .1
4775 .groups(language_server_id, &mut groups, self);
4776 }
4777 } else {
4778 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4779 diagnostics.groups(*language_server_id, &mut groups, self);
4780 }
4781 }
4782
4783 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4784 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4785 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4786 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4787 });
4788
4789 groups
4790 }
4791
4792 /// Returns an iterator over the diagnostics for the given group.
4793 pub fn diagnostic_group<O>(
4794 &self,
4795 group_id: usize,
4796 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4797 where
4798 O: FromAnchor + 'static,
4799 {
4800 self.diagnostics
4801 .iter()
4802 .flat_map(move |(_, set)| set.group(group_id, self))
4803 }
4804
4805 /// An integer version number that accounts for all updates besides
4806 /// the buffer's text itself (which is versioned via a version vector).
4807 pub fn non_text_state_update_count(&self) -> usize {
4808 self.non_text_state_update_count
4809 }
4810
4811 /// An integer version that changes when the buffer's syntax changes.
4812 pub fn syntax_update_count(&self) -> usize {
4813 self.syntax.update_count()
4814 }
4815
    /// Returns a snapshot of the underlying file.
4817 pub fn file(&self) -> Option<&Arc<dyn File>> {
4818 self.file.as_ref()
4819 }
4820
4821 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4822 if let Some(file) = self.file() {
4823 if file.path().file_name().is_none() || include_root {
4824 Some(file.full_path(cx).to_string_lossy().into_owned())
4825 } else {
4826 Some(file.path().display(file.path_style(cx)).to_string())
4827 }
4828 } else {
4829 None
4830 }
4831 }
4832
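    /// Collects the words in `query.range`, keyed by their text and mapped to anchor ranges.
    ///
    /// A minimal sketch (not compiled as a doc test), assuming `snapshot` is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("fb"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// // `words` maps each matching word (e.g. "foo_bar") to its anchor range in the buffer.
    /// ```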
4833 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4834 let query_str = query.fuzzy_contents;
4835 if query_str.is_some_and(|query| query.is_empty()) {
4836 return BTreeMap::default();
4837 }
4838
4839 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4840 language,
4841 override_id: None,
4842 }));
4843
4844 let mut query_ix = 0;
4845 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4846 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4847
4848 let mut words = BTreeMap::default();
4849 let mut current_word_start_ix = None;
4850 let mut chunk_ix = query.range.start;
4851 for chunk in self.chunks(query.range, false) {
4852 for (i, c) in chunk.text.char_indices() {
4853 let ix = chunk_ix + i;
4854 if classifier.is_word(c) {
4855 if current_word_start_ix.is_none() {
4856 current_word_start_ix = Some(ix);
4857 }
4858
4859 if let Some(query_chars) = &query_chars
4860 && query_ix < query_len
4861 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4862 {
4863 query_ix += 1;
4864 }
4865 continue;
4866 } else if let Some(word_start) = current_word_start_ix.take()
4867 && query_ix == query_len
4868 {
4869 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4870 let mut word_text = self.text_for_range(word_start..ix).peekable();
4871 let first_char = word_text
4872 .peek()
4873 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4875 if !query.skip_digits
4876 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4877 {
4878 words.insert(word_text.collect(), word_range);
4879 }
4880 }
4881 query_ix = 0;
4882 }
4883 chunk_ix += chunk.text.len();
4884 }
4885
4886 words
4887 }
4888}
4889
4890pub struct WordsQuery<'a> {
    /// When set, only returns words that contain all of this string's characters, in order.
4892 pub fuzzy_contents: Option<&'a str>,
4893 /// Skips words that start with a digit.
4894 pub skip_digits: bool,
    /// The buffer offset range to search for words.
4896 pub range: Range<usize>,
4897}
4898
4899fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4900 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4901}
4902
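/// Returns the [`IndentSize`] of the leading whitespace in the given character stream.
///
/// A small sketch of the behavior (not compiled as a doc test); note that with mixed
/// indentation every leading space or tab is counted, while the kind is taken from the
/// first indent character:
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert!(indent.len == 4 && indent.kind == IndentKind::Space);
///
/// let mixed = indent_size_for_text("\t  foo".chars());
/// assert!(mixed.len == 3 && mixed.kind == IndentKind::Tab);
/// ```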
4903fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4904 let mut result = IndentSize::spaces(0);
4905 for c in text {
4906 let kind = match c {
4907 ' ' => IndentKind::Space,
4908 '\t' => IndentKind::Tab,
4909 _ => break,
4910 };
4911 if result.len == 0 {
4912 result.kind = kind;
4913 }
4914 result.len += 1;
4915 }
4916 result
4917}
4918
4919impl Clone for BufferSnapshot {
4920 fn clone(&self) -> Self {
4921 Self {
4922 text: self.text.clone(),
4923 syntax: self.syntax.clone(),
4924 file: self.file.clone(),
4925 remote_selections: self.remote_selections.clone(),
4926 diagnostics: self.diagnostics.clone(),
4927 language: self.language.clone(),
4928 tree_sitter_data: self.tree_sitter_data.clone(),
4929 non_text_state_update_count: self.non_text_state_update_count,
4930 }
4931 }
4932}
4933
4934impl Deref for BufferSnapshot {
4935 type Target = text::BufferSnapshot;
4936
4937 fn deref(&self) -> &Self::Target {
4938 &self.text
4939 }
4940}
4941
4942unsafe impl Send for BufferChunks<'_> {}
4943
4944impl<'a> BufferChunks<'a> {
4945 pub(crate) fn new(
4946 text: &'a Rope,
4947 range: Range<usize>,
4948 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4949 diagnostics: bool,
4950 buffer_snapshot: Option<&'a BufferSnapshot>,
4951 ) -> Self {
4952 let mut highlights = None;
4953 if let Some((captures, highlight_maps)) = syntax {
4954 highlights = Some(BufferChunkHighlights {
4955 captures,
4956 next_capture: None,
4957 stack: Default::default(),
4958 highlight_maps,
4959 })
4960 }
4961
4962 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4963 let chunks = text.chunks_in_range(range.clone());
4964
4965 let mut this = BufferChunks {
4966 range,
4967 buffer_snapshot,
4968 chunks,
4969 diagnostic_endpoints,
4970 error_depth: 0,
4971 warning_depth: 0,
4972 information_depth: 0,
4973 hint_depth: 0,
4974 unnecessary_depth: 0,
4975 underline: true,
4976 highlights,
4977 };
4978 this.initialize_diagnostic_endpoints();
4979 this
4980 }
4981
    /// Seeks to the given byte range in the buffer.
4983 pub fn seek(&mut self, range: Range<usize>) {
4984 let old_range = std::mem::replace(&mut self.range, range.clone());
4985 self.chunks.set_range(self.range.clone());
4986 if let Some(highlights) = self.highlights.as_mut() {
4987 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4988 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4989 highlights
4990 .stack
4991 .retain(|(end_offset, _)| *end_offset > range.start);
4992 if let Some(capture) = &highlights.next_capture
4993 && range.start >= capture.node.start_byte()
4994 {
4995 let next_capture_end = capture.node.end_byte();
4996 if range.start < next_capture_end {
4997 highlights.stack.push((
4998 next_capture_end,
4999 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5000 ));
5001 }
5002 highlights.next_capture.take();
5003 }
5004 } else if let Some(snapshot) = self.buffer_snapshot {
5005 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5006 *highlights = BufferChunkHighlights {
5007 captures,
5008 next_capture: None,
5009 stack: Default::default(),
5010 highlight_maps,
5011 };
5012 } else {
5013 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5014 // Seeking such BufferChunks is not supported.
5015 debug_assert!(
5016 false,
5017 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5018 );
5019 }
5020
5021 highlights.captures.set_byte_range(self.range.clone());
5022 self.initialize_diagnostic_endpoints();
5023 }
5024 }
5025
5026 fn initialize_diagnostic_endpoints(&mut self) {
5027 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5028 && let Some(buffer) = self.buffer_snapshot
5029 {
5030 let mut diagnostic_endpoints = Vec::new();
5031 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5032 diagnostic_endpoints.push(DiagnosticEndpoint {
5033 offset: entry.range.start,
5034 is_start: true,
5035 severity: entry.diagnostic.severity,
5036 is_unnecessary: entry.diagnostic.is_unnecessary,
5037 underline: entry.diagnostic.underline,
5038 });
5039 diagnostic_endpoints.push(DiagnosticEndpoint {
5040 offset: entry.range.end,
5041 is_start: false,
5042 severity: entry.diagnostic.severity,
5043 is_unnecessary: entry.diagnostic.is_unnecessary,
5044 underline: entry.diagnostic.underline,
5045 });
5046 }
5047 diagnostic_endpoints
5048 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5049 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5050 self.hint_depth = 0;
5051 self.error_depth = 0;
5052 self.warning_depth = 0;
5053 self.information_depth = 0;
5054 }
5055 }
5056
5057 /// The current byte offset in the buffer.
5058 pub fn offset(&self) -> usize {
5059 self.range.start
5060 }
5061
5062 pub fn range(&self) -> Range<usize> {
5063 self.range.clone()
5064 }
5065
5066 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5067 let depth = match endpoint.severity {
5068 DiagnosticSeverity::ERROR => &mut self.error_depth,
5069 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5070 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5071 DiagnosticSeverity::HINT => &mut self.hint_depth,
5072 _ => return,
5073 };
5074 if endpoint.is_start {
5075 *depth += 1;
5076 } else {
5077 *depth -= 1;
5078 }
5079
5080 if endpoint.is_unnecessary {
5081 if endpoint.is_start {
5082 self.unnecessary_depth += 1;
5083 } else {
5084 self.unnecessary_depth -= 1;
5085 }
5086 }
5087 }
5088
5089 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5090 if self.error_depth > 0 {
5091 Some(DiagnosticSeverity::ERROR)
5092 } else if self.warning_depth > 0 {
5093 Some(DiagnosticSeverity::WARNING)
5094 } else if self.information_depth > 0 {
5095 Some(DiagnosticSeverity::INFORMATION)
5096 } else if self.hint_depth > 0 {
5097 Some(DiagnosticSeverity::HINT)
5098 } else {
5099 None
5100 }
5101 }
5102
5103 fn current_code_is_unnecessary(&self) -> bool {
5104 self.unnecessary_depth > 0
5105 }
5106}
5107
5108impl<'a> Iterator for BufferChunks<'a> {
5109 type Item = Chunk<'a>;
5110
5111 fn next(&mut self) -> Option<Self::Item> {
5112 let mut next_capture_start = usize::MAX;
5113 let mut next_diagnostic_endpoint = usize::MAX;
5114
5115 if let Some(highlights) = self.highlights.as_mut() {
5116 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5117 if *parent_capture_end <= self.range.start {
5118 highlights.stack.pop();
5119 } else {
5120 break;
5121 }
5122 }
5123
5124 if highlights.next_capture.is_none() {
5125 highlights.next_capture = highlights.captures.next();
5126 }
5127
5128 while let Some(capture) = highlights.next_capture.as_ref() {
5129 if self.range.start < capture.node.start_byte() {
5130 next_capture_start = capture.node.start_byte();
5131 break;
5132 } else {
5133 let highlight_id =
5134 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5135 highlights
5136 .stack
5137 .push((capture.node.end_byte(), highlight_id));
5138 highlights.next_capture = highlights.captures.next();
5139 }
5140 }
5141 }
5142
5143 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5144 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5145 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5146 if endpoint.offset <= self.range.start {
5147 self.update_diagnostic_depths(endpoint);
5148 diagnostic_endpoints.next();
5149 self.underline = endpoint.underline;
5150 } else {
5151 next_diagnostic_endpoint = endpoint.offset;
5152 break;
5153 }
5154 }
5155 }
5156 self.diagnostic_endpoints = diagnostic_endpoints;
5157
5158 if let Some(ChunkBitmaps {
5159 text: chunk,
5160 chars: chars_map,
5161 tabs,
5162 }) = self.chunks.peek_with_bitmaps()
5163 {
5164 let chunk_start = self.range.start;
5165 let mut chunk_end = (self.chunks.offset() + chunk.len())
5166 .min(next_capture_start)
5167 .min(next_diagnostic_endpoint);
5168 let mut highlight_id = None;
5169 if let Some(highlights) = self.highlights.as_ref()
5170 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5171 {
5172 chunk_end = chunk_end.min(*parent_capture_end);
5173 highlight_id = Some(*parent_highlight_id);
5174 }
5175 let bit_start = chunk_start - self.chunks.offset();
5176 let bit_end = chunk_end - self.chunks.offset();
5177
5178 let slice = &chunk[bit_start..bit_end];
5179
5180 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5181 let tabs = (tabs >> bit_start) & mask;
5182 let chars = (chars_map >> bit_start) & mask;
5183
5184 self.range.start = chunk_end;
5185 if self.range.start == self.chunks.offset() + chunk.len() {
5186 self.chunks.next().unwrap();
5187 }
5188
5189 Some(Chunk {
5190 text: slice,
5191 syntax_highlight_id: highlight_id,
5192 underline: self.underline,
5193 diagnostic_severity: self.current_diagnostic_severity(),
5194 is_unnecessary: self.current_code_is_unnecessary(),
5195 tabs,
5196 chars,
5197 ..Chunk::default()
5198 })
5199 } else {
5200 None
5201 }
5202 }
5203}
5204
5205impl operation_queue::Operation for Operation {
5206 fn lamport_timestamp(&self) -> clock::Lamport {
5207 match self {
5208 Operation::Buffer(_) => {
5209 unreachable!("buffer operations should never be deferred at this layer")
5210 }
5211 Operation::UpdateDiagnostics {
5212 lamport_timestamp, ..
5213 }
5214 | Operation::UpdateSelections {
5215 lamport_timestamp, ..
5216 }
5217 | Operation::UpdateCompletionTriggers {
5218 lamport_timestamp, ..
5219 }
5220 | Operation::UpdateLineEnding {
5221 lamport_timestamp, ..
5222 } => *lamport_timestamp,
5223 }
5224 }
5225}
5226
5227impl Default for Diagnostic {
5228 fn default() -> Self {
5229 Self {
5230 source: Default::default(),
5231 source_kind: DiagnosticSourceKind::Other,
5232 code: None,
5233 code_description: None,
5234 severity: DiagnosticSeverity::ERROR,
5235 message: Default::default(),
5236 markdown: None,
5237 group_id: 0,
5238 is_primary: false,
5239 is_disk_based: false,
5240 is_unnecessary: false,
5241 underline: true,
5242 data: None,
5243 }
5244 }
5245}
5246
5247impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5249 pub fn spaces(len: u32) -> Self {
5250 Self {
5251 len,
5252 kind: IndentKind::Space,
5253 }
5254 }
5255
5256 /// Returns an [`IndentSize`] representing a tab.
5257 pub fn tab() -> Self {
5258 Self {
5259 len: 1,
5260 kind: IndentKind::Tab,
5261 }
5262 }
5263
5264 /// An iterator over the characters represented by this [`IndentSize`].
5265 pub fn chars(&self) -> impl Iterator<Item = char> {
5266 iter::repeat(self.char()).take(self.len as usize)
5267 }
5268
5269 /// The character representation of this [`IndentSize`].
5270 pub fn char(&self) -> char {
5271 match self.kind {
5272 IndentKind::Space => ' ',
5273 IndentKind::Tab => '\t',
5274 }
5275 }
5276
5277 /// Consumes the current [`IndentSize`] and returns a new one that has
5278 /// been shrunk or enlarged by the given size along the given direction.
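    ///
    /// A small sketch of the behavior (not compiled as a doc test):
    ///
    /// ```ignore
    /// // Growing with a matching kind adds the lengths together.
    /// assert!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len == 8);
    /// // Shrinking is only applied when the kinds match, so this stays at 4 spaces.
    /// assert!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len == 4);
    /// ```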
5279 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5280 match direction {
5281 Ordering::Less => {
5282 if self.kind == size.kind && self.len >= size.len {
5283 self.len -= size.len;
5284 }
5285 }
5286 Ordering::Equal => {}
5287 Ordering::Greater => {
5288 if self.len == 0 {
5289 self = size;
5290 } else if self.kind == size.kind {
5291 self.len += size.len;
5292 }
5293 }
5294 }
5295 self
5296 }
5297
5298 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5299 match self.kind {
5300 IndentKind::Space => self.len as usize,
5301 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5302 }
5303 }
5304}
5305
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

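/// Coalesces an ascending iterator of `u32` values into contiguous ranges,
/// capping each range at `max_len` elements.
///
/// A minimal sketch of the expected grouping (marked `ignore` because this is
/// a crate-private helper, so it is not visible to doctests):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 10].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 10..11]);
/// ```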
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

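    /// Classifies `c`, using `ignore_punctuation` in place of the value this
    /// classifier was configured with. Characters from the language scope's
    /// word-character set (or its completion/linked-edit variants) are also
    /// treated as word characters.
    ///
    /// A minimal sketch (marked `ignore` because it assumes `CharClassifier`
    /// and `CharKind` are in scope for the doctest):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('a', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with(' ', false), CharKind::Whitespace);
    /// assert_eq!(classifier.kind_with('-', false), CharKind::Punctuation);
    /// // With `ignore_punctuation`, punctuation is classified as a word character.
    /// assert_eq!(classifier.kind_with('-', true), CharKind::Word);
    /// ```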
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
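///
/// A minimal sketch of the expected output (marked `ignore` because it assumes
/// a `Rope: From<&str>` conversion is available to the doctest):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Byte ranges of the trailing spaces after "a" and the trailing tab after "b".
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```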
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}