pub mod row_chunk;

use crate::{
    DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
    TextObject, TreeSitterOptions,
    diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
    language_settings::{LanguageSettings, language_settings},
    outline::OutlineItem,
    row_chunk::RowChunks,
    syntax_map::{
        SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
        SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
    },
    task_context::RunnableRange,
    text_diff::text_diff,
    unified_diff,
};
pub use crate::{
    Grammar, Language, LanguageRegistry,
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto,
};
use anyhow::{Context as _, Result};
pub use clock::ReplicaId;
use clock::{Global, Lamport};
use collections::{HashMap, HashSet};
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
    App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
    Task, TaskLabel, TextStyle,
};

use lsp::{LanguageServerId, NumberOrString};
use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
    any::Any,
    borrow::Cow,
    cell::Cell,
    cmp::{self, Ordering, Reverse},
    collections::{BTreeMap, BTreeSet},
    future::Future,
    iter::{self, Iterator, Peekable},
    mem,
    num::NonZeroU32,
    ops::{Deref, Range},
    path::PathBuf,
    rc,
    sync::{Arc, LazyLock},
    time::{Duration, Instant},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
use text::*;
pub use text::{
    Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
    LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
    SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
    ToPointUtf16, Transaction, TransactionId, Unclipped,
};
use theme::{ActiveTheme as _, SyntaxTheme};
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};

#[cfg(any(test, feature = "test-support"))]
pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};

pub use lsp::DiagnosticSeverity;

/// A label for the background task spawned by the buffer to compute
/// a diff against the contents of its file.
pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);

/// Indicates whether a [`Buffer`] has permission to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a read-only replica.
    ReadOnly,
}

pub type BufferRow = u32;

/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    text: TextBuffer,
    branch_state: Option<BufferBranchState>,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    preview_version: clock::Global,
    transaction_depth: usize,
    was_dirty_before_starting_transaction: Option<bool>,
    reload_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_map: Mutex<SyntaxMap>,
    reparse: Option<Task<()>>,
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    non_text_state_update_count: usize,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    diagnostics_timestamp: clock::Lamport,
    completion_triggers: BTreeSet<String>,
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    completion_triggers_timestamp: clock::Lamport,
    deferred_ops: OperationQueue<Operation>,
    capability: Capability,
    has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    _subscriptions: Vec<gpui::Subscription>,
    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
}

#[derive(Debug, Clone)]
pub struct TreeSitterData {
    chunks: RowChunks,
    brackets_by_chunks: Vec<Option<Vec<BracketMatch<usize>>>>,
}

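/// The maximum number of buffer rows covered by a single chunk when caching
/// per-chunk tree-sitter results such as bracket matches.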
const MAX_ROWS_IN_A_CHUNK: u32 = 50;

impl TreeSitterData {
    fn clear(&mut self) {
        self.brackets_by_chunks = vec![None; self.chunks.len()];
    }

    fn new(snapshot: text::BufferSnapshot) -> Self {
        let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
        Self {
            brackets_by_chunks: vec![None; chunks.len()],
            chunks,
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    Idle,
    Parsing,
}

struct BufferBranchState {
    base_buffer: Entity<Buffer>,
    merged_operations: Vec<Lamport>,
}

/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    pub text: text::BufferSnapshot,
    pub syntax: SyntaxSnapshot,
    file: Option<Arc<dyn File>>,
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    language: Option<Arc<Language>>,
    non_text_state_update_count: usize,
    tree_sitter_data: Arc<Mutex<TreeSitterData>>,
}

/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}

/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}

/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}

impl From<settings::CursorShape> for CursorShape {
    fn from(shape: settings::CursorShape) -> Self {
        match shape {
            settings::CursorShape::Bar => CursorShape::Bar,
            settings::CursorShape::Block => CursorShape::Block,
            settings::CursorShape::Underline => CursorShape::Underline,
            settings::CursorShape::Hollow => CursorShape::Hollow,
        }
    }
}

#[derive(Clone, Debug)]
struct SelectionSet {
    line_mode: bool,
    cursor_shape: CursorShape,
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

/// A diagnostic associated with a certain range of a buffer.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Diagnostic {
    /// The name of the service that produced this diagnostic.
    pub source: Option<String>,
    /// The ID provided by the dynamic registration that produced this diagnostic.
    pub registration_id: Option<SharedString>,
    /// A machine-readable code that identifies this diagnostic.
    pub code: Option<NumberOrString>,
    pub code_description: Option<lsp::Uri>,
    /// Whether this diagnostic is a hint, warning, or error.
    pub severity: DiagnosticSeverity,
    /// The human-readable message associated with this diagnostic.
    pub message: String,
    /// The human-readable message, in Markdown format.
    pub markdown: Option<String>,
    /// An id that identifies the group to which this diagnostic belongs.
    ///
    /// When a language server produces a diagnostic with
    /// one or more associated diagnostics, those diagnostics are all
    /// assigned a single group ID.
    pub group_id: usize,
    /// Whether this diagnostic is the primary diagnostic for its group.
    ///
    /// In a given group, the primary diagnostic is the top-level diagnostic
    /// returned by the language server. The non-primary diagnostics are the
    /// associated diagnostics.
    pub is_primary: bool,
    /// Whether this diagnostic is considered to originate from an analysis of
    /// files on disk, as opposed to any unsaved buffer contents. This is a
    /// property of a given diagnostic source, and is configured for a given
    /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
    /// for the language server.
    pub is_disk_based: bool,
    /// Whether this diagnostic marks unnecessary code.
    pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly distinguished by the kind of source that produced them.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the language server when code actions are requested for this diagnostic.
    pub data: Option<Value>,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum DiagnosticSourceKind {
    Pulled,
    Pushed,
    Other,
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}

/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        operation: Operation,
        is_local: bool,
    },
    /// The buffer was edited.
    Edited,
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer needs to be reloaded.
    ReloadNeeded,
    /// The buffer's language was changed.
    LanguageChanged,
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}

/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Returns whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;
}

/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime },
    /// Deleted file that was previously present.
    Deleted,
}

impl DiskState {
    /// Returns the file's last known modification time on disk.
    pub fn mtime(self) -> Option<MTime> {
        match self {
            DiskState::New => None,
            DiskState::Present { mtime } => Some(mtime),
            DiskState::Deleted => None,
        }
    }

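    /// Returns whether the file currently exists in its backing storage,
    /// i.e. it is neither new nor deleted.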
    pub fn exists(&self) -> bool {
        match self {
            DiskState::New => false,
            DiskState::Present { .. } => true,
            DiskState::Deleted => false,
        }
    }
}

/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}

/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the insertion
        /// has its indentation adjusted by `b - a`. For example, a block copied at
        /// column 4 whose first line is re-indented to column 8 shifts every other
        /// line right by 4 columns.
        original_indent_columns: Vec<Option<u32>>,
    },
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    entries: Vec<AutoindentRequestEntry>,
    is_block_mode: bool,
    ignore_empty_lines: bool,
}

#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// Whether or not these lines should be considered brand new, for the
    /// purpose of auto-indent. When text is not new, its indentation will
    /// only be adjusted if the suggested indentation level has *changed*
    /// since the edit was made.
    first_line_is_new: bool,
    indent_size: IndentSize,
    original_indent_column: Option<u32>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    delta: Ordering,
    within_error: bool,
}

struct BufferChunkHighlights<'a> {
    captures: SyntaxMapCaptures<'a>,
    next_capture: Option<SyntaxMapCapture<'a>>,
    stack: Vec<(usize, HighlightId)>,
    highlight_maps: Vec<HighlightMap>,
}

/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    range: Range<usize>,
    chunks: text::Chunks<'a>,
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    underline: bool,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}

/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug)]
pub struct Diff {
    pub base_version: clock::Global,
    pub line_ending: LineEnding,
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}

#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    underline: bool,
    severity: DiagnosticSeverity,
    is_unnecessary: bool,
}

/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}

/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}

/// A runnable is a set of data about a region that could be resolved into a task.
pub struct Runnable {
    pub tags: SmallVec<[RunnableTag; 1]>,
    pub language: Arc<Language>,
    pub buffer: BufferId,
}

#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    pub text: SharedString,
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}

#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    pub text: String,
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}

impl HighlightedText {
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
    pub fn first_line_preview(self) -> (Self, bool) {
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        (preview, self.text.len() > newline_ix)
    }
}

impl HighlightedTextBuilder {
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| id.style(syntax_theme))
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}

#[derive(Clone)]
pub struct EditPreview {
    old_snapshot: text::BufferSnapshot,
    applied_edits_snapshot: text::BufferSnapshot,
    syntax_snapshot: SyntaxSnapshot,
}

impl EditPreview {
    pub fn as_unified_diff(&self, edits: &[(Range<Anchor>, impl AsRef<str>)]) -> Option<String> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first.start.to_point(&self.old_snapshot);
        let old_end = last.end.to_point(&self.old_snapshot);
        let new_end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        let start = Point::new(start.row.saturating_sub(3), 0);
        let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
        let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());

        Some(unified_diff(
            &self
                .old_snapshot
                .text_for_range(start..old_end)
                .collect::<String>(),
            &self
                .applied_edits_snapshot
                .text_for_range(start..new_end)
                .collect::<String>(),
        ))
    }

    pub fn highlight_edits(
        &self,
        current_snapshot: &BufferSnapshot,
        edits: &[(Range<Anchor>, impl AsRef<str>)],
        include_deletions: bool,
        cx: &App,
    ) -> HighlightedText {
        let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
            return HighlightedText::default();
        };

        let mut highlighted_text = HighlightedTextBuilder::default();

        let visible_range_in_preview_snapshot =
            visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
        let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;

        let insertion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().created_background),
            ..Default::default()
        };
        let deletion_highlight_style = HighlightStyle {
            background_color: Some(cx.theme().status().deleted_background),
            ..Default::default()
        };
        let syntax_theme = cx.theme().syntax();

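        // For each edit, emit up to three runs: the unchanged text preceding the edit,
        // the deleted text from the current snapshot (when deletions are included),
        // and the newly inserted text from the preview snapshot.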
        for (range, edit_text) in edits {
            let edit_new_end_in_preview_snapshot = range
                .end
                .bias_right(&self.old_snapshot)
                .to_offset(&self.applied_edits_snapshot);
            let edit_start_in_preview_snapshot =
                edit_new_end_in_preview_snapshot - edit_text.as_ref().len();

            let unchanged_range_in_preview_snapshot =
                offset_in_preview_snapshot..edit_start_in_preview_snapshot;
            if !unchanged_range_in_preview_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    unchanged_range_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    None,
                    syntax_theme,
                );
            }

            let range_in_current_snapshot = range.to_offset(current_snapshot);
            if include_deletions && !range_in_current_snapshot.is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
                    Some(deletion_highlight_style),
                    syntax_theme,
                );
            }

            if !edit_text.as_ref().is_empty() {
                highlighted_text.add_text_from_buffer_range(
                    edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
                    &self.applied_edits_snapshot,
                    &self.syntax_snapshot,
                    Some(insertion_highlight_style),
                    syntax_theme,
                );
            }

            offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
        }

        highlighted_text.add_text_from_buffer_range(
            offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
            &self.applied_edits_snapshot,
            &self.syntax_snapshot,
            None,
            syntax_theme,
        );

        highlighted_text.build()
    }

    pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
        cx.new(|cx| {
            let mut buffer = Buffer::local_normalized(
                self.applied_edits_snapshot.as_rope().clone(),
                self.applied_edits_snapshot.line_ending(),
                cx,
            );
            buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
            buffer
        })
    }

    pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
        let (first, _) = edits.first()?;
        let (last, _) = edits.last()?;

        let start = first
            .start
            .bias_left(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);
        let end = last
            .end
            .bias_right(&self.old_snapshot)
            .to_point(&self.applied_edits_snapshot);

        // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
        let range = Point::new(start.row, 0)
            ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));

        Some(range)
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch<T> {
    pub open_range: Range<T>,
    pub close_range: Range<T>,
    pub newline_only: bool,
    pub syntax_layer_depth: usize,
    pub color_index: Option<usize>,
}

impl<T> BracketMatch<T> {
    pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
        (self.open_range, self.close_range)
    }
}

impl Buffer {
    /// Create a new buffer with the given base text.
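    ///
    /// A minimal usage sketch, assuming a `gpui` entity context is available
    /// (as in tests); the text is just an illustrative placeholder:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```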
    pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
        Self::build(
            TextBuffer::new(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                base_text.into(),
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
    pub fn local_normalized(
        base_text_normalized: Rope,
        line_ending: LineEnding,
        cx: &Context<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new_normalized(
                ReplicaId::LOCAL,
                cx.entity_id().as_non_zero_u64().into(),
                line_ending,
                base_text_normalized,
            ),
            None,
            Capability::ReadWrite,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer.
    pub fn remote(
        remote_id: BufferId,
        replica_id: ReplicaId,
        capability: Capability,
        base_text: impl Into<String>,
    ) -> Self {
        Self::build(
            TextBuffer::new(replica_id, remote_id, base_text.into()),
            None,
            capability,
        )
    }

    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }

    /// Serialize the buffer's state to a protobuf message.
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }

    /// Serialize as protobufs all of the changes to the buffer since the given version.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in &self.diagnostics {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

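        // Serialize the text operations on a background thread, skipping any operations
        // already observed by `since`, and sort everything by Lamport timestamp.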
        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }

    /// Assign a language to the buffer, returning the buffer.
    pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language_async(Some(language), cx);
        self
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse it, and return the buffer.
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        self.capability == Capability::ReadOnly
    }

    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        let tree_sitter_data = TreeSitterData::new(snapshot);
        Self {
            saved_mtime,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,
            branch_state: None,
            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            _subscriptions: Vec::new(),
        }
    }

    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                    .snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            let tree_sitter_data = TreeSitterData::new(text.clone());
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
                language,
                non_text_state_update_count: 0,
            }
        }
    }

    pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text = TextBuffer::new_normalized(
            ReplicaId::LOCAL,
            buffer_id,
            Default::default(),
            Rope::new(),
        )
        .snapshot();
        let syntax = SyntaxMap::new(&text).snapshot();
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language: None,
            non_text_state_update_count: 0,
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        let tree_sitter_data = TreeSitterData::new(text.clone());
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
        }
    }

    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let syntax = syntax_map.snapshot();

        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: self.tree_sitter_data.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
        }
    }

    pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
        let this = cx.entity();
        cx.new(|cx| {
            let mut branch = Self {
                branch_state: Some(BufferBranchState {
                    base_buffer: this.clone(),
                    merged_operations: Default::default(),
                }),
                language: self.language.clone(),
                has_conflict: self.has_conflict,
                has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
                _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
                ..Self::build(self.text.branch(), self.file.clone(), self.capability())
            };
            if let Some(language_registry) = self.language_registry() {
                branch.set_language_registry(language_registry);
            }

            // Reparse the branch buffer so that we get syntax highlighting immediately.
            branch.reparse(cx, true);

            branch
        })
    }

    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot();
        let mut branch_buffer = self.text.branch();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                branch_buffer.edit(edits.iter().cloned());
                let snapshot = branch_buffer.snapshot();
                syntax_snapshot.interpolate(&snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: branch_buffer.snapshot(),
                syntax_snapshot,
            }
        })
    }

    /// Applies all of the changes in this buffer that intersect any of the
    /// given `ranges` to its base buffer.
    ///
    /// If `ranges` is empty, then all changes will be applied. This buffer must
    /// be a branch buffer to call this method.
    pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
        let Some(base_buffer) = self.base_buffer() else {
            debug_panic!("not a branch buffer");
            return;
        };

        let mut ranges = if ranges.is_empty() {
            &[0..usize::MAX]
        } else {
            ranges.as_slice()
        }
        .iter()
        .peekable();

        let mut edits = Vec::new();
        for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
            let mut is_included = false;
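            // Skip ranges that end before this edit starts; the edit is included if the
            // next remaining range begins at or before the edit's end.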
            while let Some(range) = ranges.peek() {
                if range.end < edit.new.start {
                    ranges.next().unwrap();
                } else {
                    if range.start <= edit.new.end {
                        is_included = true;
                    }
                    break;
                }
            }

            if is_included {
                edits.push((
                    edit.old.clone(),
                    self.text_for_range(edit.new.clone()).collect::<String>(),
                ));
            }
        }

        let operation = base_buffer.update(cx, |base_buffer, cx| {
            // cx.emit(BufferEvent::DiffBaseChanged);
            base_buffer.edit(edits, None, cx)
        });

        if let Some(operation) = operation
            && let Some(BufferBranchState {
                merged_operations, ..
            }) = &mut self.branch_state
        {
            merged_operations.push(operation);
        }
    }

    fn on_base_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        let BufferEvent::Operation { operation, .. } = event else {
            return;
        };
        let Some(BufferBranchState {
            merged_operations, ..
        }) = &mut self.branch_state
        else {
            return;
        };

        let mut operation_to_undo = None;
        if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
            && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
        {
            merged_operations.remove(ix);
            operation_to_undo = Some(operation.timestamp);
        }

        self.apply_ops([operation.clone()], cx);

        if let Some(timestamp) = operation_to_undo {
            let counts = [(timestamp, u32::MAX)].into_iter().collect();
            self.undo_operations(counts, cx);
        }
    }

    #[cfg(test)]
    pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        self.text.snapshot()
    }

    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Assign a language to the buffer.
    pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, true, cx);
    }

    fn set_language_(
        &mut self,
        language: Option<Arc<Language>>,
        may_block: bool,
        cx: &mut Context<Self>,
    ) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        self.language = language;
        self.was_changed();
        self.reparse(cx, may_block);
        cx.emit(BufferEvent::LanguageChanged);
    }

    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }

    /// Assign the line ending type to the buffer.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }

    /// Assign the buffer a new [`Capability`].
    pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
        if self.capability != capability {
            self.capability = capability;
            cx.emit(BufferEvent::CapabilityChanged)
        }
    }

    /// This method is called to signal that the buffer has been saved.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }

    /// Reloads the contents of the buffer from disk.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();
        self.reload_task = Some(cx.spawn(async move |this, cx| {
            let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
                let file = this.file.as_ref()?.as_local()?;

                Some((file.disk_state().mtime(), file.load(cx)))
            })?
            else {
                return Ok(());
            };

            let new_text = new_text.await?;
            let diff = this
                .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
                .await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    this.apply_diff(diff, cx);
                    tx.send(this.finalize_last_transaction().cloned()).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }

    /// This method is called to signal that the buffer has been reloaded.
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }

    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }

    pub fn base_buffer(&self) -> Option<Entity<Self>> {
        Some(self.branch_state.as_ref()?.base_buffer.clone())
    }

    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut is_first = true;
        let start_anchor = self.anchor_before(offset);
        let end_anchor = self.anchor_after(offset);
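        // Keep the outermost syntax layer unconditionally; deeper layers are kept only if
        // they have no included sub-ranges or one of their sub-ranges overlaps the position.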
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .filter(|layer| {
                if is_first {
                    is_first = false;
                    return true;
                }

                layer
                    .included_sub_ranges
                    .map(|sub_ranges| {
                        sub_ranges.iter().any(|sub_range| {
                            let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
                            let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
                            !is_before_start && !is_after_end
                        })
                    })
                    .unwrap_or(true)
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, &self.text, false)
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }

    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to 1ms waiting for the parse to complete. As soon as it
    /// completes, we proceed synchronously, unless the 1ms timeout elapses.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits for
    /// the parse to complete, and return with the interpolated tree still in the
    /// foreground. When the background parse completes, we call back into the main
    /// thread and assign the result to the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
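        // If blocking is allowed, wait up to `sync_parse_timeout` for the background parse;
        // otherwise (or after timing out), finish the parse asynchronously and reparse again
        // if the buffer, grammar, or language registry changed in the meantime.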
        if may_block {
            match cx
                .background_executor()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_syntax_snapshot) => {
                    self.did_finish_parsing(new_syntax_snapshot, cx);
                    self.reparse = None;
                }
                Err(parse_task) => {
                    self.reparse = Some(cx.spawn(async move |this, cx| {
                        let new_syntax_map = cx.background_spawn(parse_task).await;
                        this.update(cx, move |this, cx| {
                            let grammar_changed = || {
                                this.language.as_ref().is_none_or(|current_language| {
                                    !Arc::ptr_eq(&language, current_language)
                                })
                            };
                            let language_registry_changed = || {
                                new_syntax_map.contains_unknown_injections()
                                    && language_registry.is_some_and(|registry| {
                                        registry.version()
                                            != new_syntax_map.language_registry_version()
                                    })
                            };
                            let parse_again = this.version.changed_since(&parsed_version)
                                || language_registry_changed()
                                || grammar_changed();
                            this.did_finish_parsing(new_syntax_map, cx);
                            this.reparse = None;
                            if parse_again {
                                this.reparse(cx, false);
                            }
                        })
                        .ok();
                    }));
                }
            }
        } else {
            self.reparse = Some(cx.spawn(async move |this, cx| {
                let new_syntax_map = cx.background_spawn(parse_task).await;
                this.update(cx, move |this, cx| {
                    let grammar_changed = || {
                        this.language.as_ref().is_none_or(|current_language| {
                            !Arc::ptr_eq(&language, current_language)
                        })
                    };
                    let language_registry_changed = || {
                        new_syntax_map.contains_unknown_injections()
                            && language_registry.is_some_and(|registry| {
                                registry.version() != new_syntax_map.language_registry_version()
                            })
                    };
                    let parse_again = this.version.changed_since(&parsed_version)
                        || language_registry_changed()
                        || grammar_changed();
                    this.did_finish_parsing(new_syntax_map, cx);
                    this.reparse = None;
                    if parse_again {
                        this.reparse(cx, false);
                    }
                })
                .ok();
            }));
        }
    }

    fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
        self.was_changed();
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.request_autoindent(cx);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        self.tree_sitter_data.lock().clear();
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }

    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

1758    /// Waits until the buffer is no longer parsing.
1759 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1760 let mut parse_status = self.parse_status();
1761 async move {
1762 while *parse_status.borrow() != ParseStatus::Idle {
1763 if parse_status.changed().await.is_err() {
1764 break;
1765 }
1766 }
1767 }
1768 }
1769
1770 /// Assign to the buffer a set of diagnostics created by a given language server.
1771 pub fn update_diagnostics(
1772 &mut self,
1773 server_id: LanguageServerId,
1774 diagnostics: DiagnosticSet,
1775 cx: &mut Context<Self>,
1776 ) {
1777 let lamport_timestamp = self.text.lamport_clock.tick();
1778 let op = Operation::UpdateDiagnostics {
1779 server_id,
1780 diagnostics: diagnostics.iter().cloned().collect(),
1781 lamport_timestamp,
1782 };
1783
1784 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1785 self.send_operation(op, true, cx);
1786 }
1787
1788 pub fn buffer_diagnostics(
1789 &self,
1790 for_server: Option<LanguageServerId>,
1791 ) -> Vec<&DiagnosticEntry<Anchor>> {
1792 match for_server {
1793 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1794 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1795 Err(_) => Vec::new(),
1796 },
1797 None => self
1798 .diagnostics
1799 .iter()
1800 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1801 .collect(),
1802 }
1803 }
1804
1805 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1806 if let Some(indent_sizes) = self.compute_autoindents() {
1807 let indent_sizes = cx.background_spawn(indent_sizes);
1808 match cx
1809 .background_executor()
1810 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1811 {
1812 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1813 Err(indent_sizes) => {
1814 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1815 let indent_sizes = indent_sizes.await;
1816 this.update(cx, |this, cx| {
1817 this.apply_autoindents(indent_sizes, cx);
1818 })
1819 .ok();
1820 }));
1821 }
1822 }
1823 } else {
1824 self.autoindent_requests.clear();
1825 for tx in self.wait_for_autoindent_txs.drain(..) {
1826 tx.send(()).ok();
1827 }
1828 }
1829 }
1830
1831 fn compute_autoindents(
1832 &self,
1833 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
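        // Process at most this many rows between yields, so a long autoindent
        // computation doesn't starve the background executor.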
1834 let max_rows_between_yields = 100;
1835 let snapshot = self.snapshot();
1836 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1837 return None;
1838 }
1839
1840 let autoindent_requests = self.autoindent_requests.clone();
1841 Some(async move {
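            // Maps each buffer row to the indent to apply there, plus a flag indicating
            // whether the row should be skipped if it is empty.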
1842 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1843 for request in autoindent_requests {
1844 // Resolve each edited range to its row in the current buffer and in the
1845 // buffer before this batch of edits.
1846 let mut row_ranges = Vec::new();
1847 let mut old_to_new_rows = BTreeMap::new();
1848 let mut language_indent_sizes_by_new_row = Vec::new();
1849 for entry in &request.entries {
1850 let position = entry.range.start;
1851 let new_row = position.to_point(&snapshot).row;
1852 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1853 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1854
1855 if !entry.first_line_is_new {
1856 let old_row = position.to_point(&request.before_edit).row;
1857 old_to_new_rows.insert(old_row, new_row);
1858 }
1859 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1860 }
1861
1862 // Build a map containing the suggested indentation for each of the edited lines
1863 // with respect to the state of the buffer before these edits. This map is keyed
1864 // by the rows for these lines in the current state of the buffer.
1865 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1866 let old_edited_ranges =
1867 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1868 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1869 let mut language_indent_size = IndentSize::default();
1870 for old_edited_range in old_edited_ranges {
1871 let suggestions = request
1872 .before_edit
1873 .suggest_autoindents(old_edited_range.clone())
1874 .into_iter()
1875 .flatten();
1876 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1877 if let Some(suggestion) = suggestion {
1878 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1879
1880 // Find the indent size based on the language for this row.
1881 while let Some((row, size)) = language_indent_sizes.peek() {
1882 if *row > new_row {
1883 break;
1884 }
1885 language_indent_size = *size;
1886 language_indent_sizes.next();
1887 }
1888
1889 let suggested_indent = old_to_new_rows
1890 .get(&suggestion.basis_row)
1891 .and_then(|from_row| {
1892 Some(old_suggestions.get(from_row).copied()?.0)
1893 })
1894 .unwrap_or_else(|| {
1895 request
1896 .before_edit
1897 .indent_size_for_line(suggestion.basis_row)
1898 })
1899 .with_delta(suggestion.delta, language_indent_size);
1900 old_suggestions
1901 .insert(new_row, (suggested_indent, suggestion.within_error));
1902 }
1903 }
1904 yield_now().await;
1905 }
1906
1907 // Compute new suggestions for each line, but only include them in the result
1908 // if they differ from the old suggestion for that line.
1909 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1910 let mut language_indent_size = IndentSize::default();
1911 for (row_range, original_indent_column) in row_ranges {
1912 let new_edited_row_range = if request.is_block_mode {
1913 row_range.start..row_range.start + 1
1914 } else {
1915 row_range.clone()
1916 };
1917
1918 let suggestions = snapshot
1919 .suggest_autoindents(new_edited_row_range.clone())
1920 .into_iter()
1921 .flatten();
1922 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1923 if let Some(suggestion) = suggestion {
1924 // Find the indent size based on the language for this row.
1925 while let Some((row, size)) = language_indent_sizes.peek() {
1926 if *row > new_row {
1927 break;
1928 }
1929 language_indent_size = *size;
1930 language_indent_sizes.next();
1931 }
1932
1933 let suggested_indent = indent_sizes
1934 .get(&suggestion.basis_row)
1935 .copied()
1936 .map(|e| e.0)
1937 .unwrap_or_else(|| {
1938 snapshot.indent_size_for_line(suggestion.basis_row)
1939 })
1940 .with_delta(suggestion.delta, language_indent_size);
1941
1942 if old_suggestions.get(&new_row).is_none_or(
1943 |(old_indentation, was_within_error)| {
1944 suggested_indent != *old_indentation
1945 && (!suggestion.within_error || *was_within_error)
1946 },
1947 ) {
1948 indent_sizes.insert(
1949 new_row,
1950 (suggested_indent, request.ignore_empty_lines),
1951 );
1952 }
1953 }
1954 }
1955
1956 if let (true, Some(original_indent_column)) =
1957 (request.is_block_mode, original_indent_column)
1958 {
1959 let new_indent =
1960 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1961 *indent
1962 } else {
1963 snapshot.indent_size_for_line(row_range.start)
1964 };
1965 let delta = new_indent.len as i64 - original_indent_column as i64;
1966 if delta != 0 {
1967 for row in row_range.skip(1) {
1968 indent_sizes.entry(row).or_insert_with(|| {
1969 let mut size = snapshot.indent_size_for_line(row);
1970 if size.kind == new_indent.kind {
1971 match delta.cmp(&0) {
1972 Ordering::Greater => size.len += delta as u32,
1973 Ordering::Less => {
1974 size.len = size.len.saturating_sub(-delta as u32)
1975 }
1976 Ordering::Equal => {}
1977 }
1978 }
1979 (size, request.ignore_empty_lines)
1980 });
1981 }
1982 }
1983 }
1984
1985 yield_now().await;
1986 }
1987 }
1988
1989 indent_sizes
1990 .into_iter()
1991 .filter_map(|(row, (indent, ignore_empty_lines))| {
1992 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1993 None
1994 } else {
1995 Some((row, indent))
1996 }
1997 })
1998 .collect()
1999 })
2000 }
2001
2002 fn apply_autoindents(
2003 &mut self,
2004 indent_sizes: BTreeMap<u32, IndentSize>,
2005 cx: &mut Context<Self>,
2006 ) {
2007 self.autoindent_requests.clear();
2008 for tx in self.wait_for_autoindent_txs.drain(..) {
2009 tx.send(()).ok();
2010 }
2011
2012 let edits: Vec<_> = indent_sizes
2013 .into_iter()
2014 .filter_map(|(row, indent_size)| {
2015 let current_size = indent_size_for_line(self, row);
2016 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2017 })
2018 .collect();
2019
2020 let preserve_preview = self.preserve_preview();
2021 self.edit(edits, None, cx);
2022 if preserve_preview {
2023 self.refresh_preview();
2024 }
2025 }
2026
2027 /// Create a minimal edit that will cause the given row to be indented
2028 /// with the given size. After applying this edit, the length of the line
2029 /// will always be at least `new_size.len`.
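    ///
    /// For example, growing a two-space indent to a four-space indent inserts two spaces
    /// at the start of the row, while switching between indent kinds (e.g. spaces to
    /// tabs) replaces the current leading whitespace with the new indent.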
2030 pub fn edit_for_indent_size_adjustment(
2031 row: u32,
2032 current_size: IndentSize,
2033 new_size: IndentSize,
2034 ) -> Option<(Range<Point>, String)> {
2035 if new_size.kind == current_size.kind {
2036            match new_size.len.cmp(&current_size.len) {
2037 Ordering::Greater => {
2038 let point = Point::new(row, 0);
2039 Some((
2040 point..point,
2041 iter::repeat(new_size.char())
2042 .take((new_size.len - current_size.len) as usize)
2043 .collect::<String>(),
2044 ))
2045 }
2046
2047 Ordering::Less => Some((
2048 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2049 String::new(),
2050 )),
2051
2052 Ordering::Equal => None,
2053 }
2054 } else {
2055 Some((
2056 Point::new(row, 0)..Point::new(row, current_size.len),
2057 iter::repeat(new_size.char())
2058 .take(new_size.len as usize)
2059 .collect::<String>(),
2060 ))
2061 }
2062 }
2063
2064 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2065 /// and the given new text.
2066 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2067 let old_text = self.as_rope().clone();
2068 let base_version = self.version();
2069 cx.background_executor()
2070 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
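                // Normalize the incoming text to "\n" line endings before diffing; the
                // detected line ending is preserved on the resulting `Diff` so it can be
                // restored when the diff is applied.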
2071 let old_text = old_text.to_string();
2072 let line_ending = LineEnding::detect(&new_text);
2073 LineEnding::normalize(&mut new_text);
2074 let edits = text_diff(&old_text, &new_text);
2075 Diff {
2076 base_version,
2077 line_ending,
2078 edits,
2079 }
2080 })
2081 }
2082
2083 /// Spawns a background task that searches the buffer for any whitespace
2084    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2085 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2086 let old_text = self.as_rope().clone();
2087 let line_ending = self.line_ending();
2088 let base_version = self.version();
2089 cx.background_spawn(async move {
2090 let ranges = trailing_whitespace_ranges(&old_text);
2091 let empty = Arc::<str>::from("");
2092 Diff {
2093 base_version,
2094 line_ending,
2095 edits: ranges
2096 .into_iter()
2097 .map(|range| (range, empty.clone()))
2098 .collect(),
2099 }
2100 })
2101 }
2102
2103 /// Ensures that the buffer ends with a single newline character, and
2104 /// no other whitespace. Skips if the buffer is empty.
2105 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2106 let len = self.len();
2107 if len == 0 {
2108 return;
2109 }
2110 let mut offset = len;
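        // Walk the rope backwards to find the offset just past the last non-whitespace
        // character; everything after it is replaced with a single "\n".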
2111 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2112 let non_whitespace_len = chunk
2113 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2114 .len();
2115 offset -= chunk.len();
2116 offset += non_whitespace_len;
2117 if non_whitespace_len != 0 {
2118 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2119 return;
2120 }
2121 break;
2122 }
2123 }
2124 self.edit([(offset..len, "\n")], None, cx);
2125 }
2126
2127 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2128 /// calculated, then adjust the diff to account for those changes, and discard any
2129 /// parts of the diff that conflict with those changes.
2130 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2131 let snapshot = self.snapshot();
2132 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2133 let mut delta = 0;
2134 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2135 while let Some(edit_since) = edits_since.peek() {
2136 // If the edit occurs after a diff hunk, then it does not
2137 // affect that hunk.
2138 if edit_since.old.start > range.end {
2139 break;
2140 }
2141 // If the edit precedes the diff hunk, then adjust the hunk
2142 // to reflect the edit.
2143 else if edit_since.old.end < range.start {
2144 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2145 edits_since.next();
2146 }
2147 // If the edit intersects a diff hunk, then discard that hunk.
2148 else {
2149 return None;
2150 }
2151 }
2152
2153 let start = (range.start as i64 + delta) as usize;
2154 let end = (range.end as i64 + delta) as usize;
2155 Some((start..end, new_text))
2156 });
2157
2158 self.start_transaction();
2159 self.text.set_line_ending(diff.line_ending);
2160 self.edit(adjusted_edits, None, cx);
2161 self.end_transaction(cx)
2162 }
2163
2164 pub fn has_unsaved_edits(&self) -> bool {
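        // The answer is cached in a `Cell` alongside the version it was computed for, so
        // repeated calls at the same version avoid re-walking the edit history.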
2165 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2166
2167 if last_version == self.version {
2168 self.has_unsaved_edits
2169 .set((last_version, has_unsaved_edits));
2170 return has_unsaved_edits;
2171 }
2172
2173 let has_edits = self.has_edits_since(&self.saved_version);
2174 self.has_unsaved_edits
2175 .set((self.version.clone(), has_edits));
2176 has_edits
2177 }
2178
2179 /// Checks if the buffer has unsaved changes.
2180 pub fn is_dirty(&self) -> bool {
2181 if self.capability == Capability::ReadOnly {
2182 return false;
2183 }
2184 if self.has_conflict {
2185 return true;
2186 }
2187 match self.file.as_ref().map(|f| f.disk_state()) {
2188 Some(DiskState::New) | Some(DiskState::Deleted) => {
2189 !self.is_empty() && self.has_unsaved_edits()
2190 }
2191 _ => self.has_unsaved_edits(),
2192 }
2193 }
2194
2195 /// Marks the buffer as having a conflict regardless of current buffer state.
2196 pub fn set_conflict(&mut self) {
2197 self.has_conflict = true;
2198 }
2199
2200 /// Checks if the buffer and its file have both changed since the buffer
2201 /// was last saved or reloaded.
2202 pub fn has_conflict(&self) -> bool {
2203 if self.has_conflict {
2204 return true;
2205 }
2206 let Some(file) = self.file.as_ref() else {
2207 return false;
2208 };
2209 match file.disk_state() {
2210 DiskState::New => false,
2211 DiskState::Present { mtime } => match self.saved_mtime {
2212 Some(saved_mtime) => {
2213 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2214 }
2215 None => true,
2216 },
2217 DiskState::Deleted => false,
2218 }
2219 }
2220
2221 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2222 pub fn subscribe(&mut self) -> Subscription<usize> {
2223 self.text.subscribe()
2224 }
2225
2226 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2227 ///
2228 /// This allows downstream code to check if the buffer's text has changed without
2229    /// waiting for an effect cycle, which would be required if using events.
2230 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
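        // `change_bits` is kept sorted by pointer so that the same listener is never
        // registered twice.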
2231 if let Err(ix) = self
2232 .change_bits
2233 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2234 {
2235 self.change_bits.insert(ix, bit);
2236 }
2237 }
2238
2239 /// Set the change bit for all "listeners".
2240 fn was_changed(&mut self) {
2241 self.change_bits.retain(|change_bit| {
2242 change_bit
2243 .upgrade()
2244 .inspect(|bit| {
2245 _ = bit.replace(true);
2246 })
2247 .is_some()
2248 });
2249 }
2250
2251 /// Starts a transaction, if one is not already in-progress. When undoing or
2252 /// redoing edits, all of the edits performed within a transaction are undone
2253 /// or redone together.
2254 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2255 self.start_transaction_at(Instant::now())
2256 }
2257
2258 /// Starts a transaction, providing the current time. Subsequent transactions
2259 /// that occur within a short period of time will be grouped together. This
2260 /// is controlled by the buffer's undo grouping duration.
2261 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2262 self.transaction_depth += 1;
2263 if self.was_dirty_before_starting_transaction.is_none() {
2264 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2265 }
2266 self.text.start_transaction_at(now)
2267 }
2268
2269 /// Terminates the current transaction, if this is the outermost transaction.
2270 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2271 self.end_transaction_at(Instant::now(), cx)
2272 }
2273
2274 /// Terminates the current transaction, providing the current time. Subsequent transactions
2275 /// that occur within a short period of time will be grouped together. This
2276 /// is controlled by the buffer's undo grouping duration.
2277 pub fn end_transaction_at(
2278 &mut self,
2279 now: Instant,
2280 cx: &mut Context<Self>,
2281 ) -> Option<TransactionId> {
2282 assert!(self.transaction_depth > 0);
2283 self.transaction_depth -= 1;
2284 let was_dirty = if self.transaction_depth == 0 {
2285 self.was_dirty_before_starting_transaction.take().unwrap()
2286 } else {
2287 false
2288 };
2289 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2290 self.did_edit(&start_version, was_dirty, cx);
2291 Some(transaction_id)
2292 } else {
2293 None
2294 }
2295 }
2296
2297 /// Manually add a transaction to the buffer's undo history.
2298 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2299 self.text.push_transaction(transaction, now);
2300 }
2301
2302 /// Differs from `push_transaction` in that it does not clear the redo
2303 /// stack. Intended to be used to create a parent transaction to merge
2304 /// potential child transactions into.
2305 ///
2306 /// The caller is responsible for removing it from the undo history using
2307 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2308 /// are merged into this transaction, the caller is responsible for ensuring
2309 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2310 /// cleared is to create transactions with the usual `start_transaction` and
2311    /// `end_transaction` methods and then merge the resulting transactions into
2312    /// the transaction created by this method.
2313 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2314 self.text.push_empty_transaction(now)
2315 }
2316
2317 /// Prevent the last transaction from being grouped with any subsequent transactions,
2318    /// even if they occur within the buffer's undo grouping duration.
2319 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2320 self.text.finalize_last_transaction()
2321 }
2322
2323 /// Manually group all changes since a given transaction.
2324 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2325 self.text.group_until_transaction(transaction_id);
2326 }
2327
2328    /// Manually remove a transaction from the buffer's undo history.
2329 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2330 self.text.forget_transaction(transaction_id)
2331 }
2332
2333    /// Retrieve a transaction from the buffer's undo history.
2334 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2335 self.text.get_transaction(transaction_id)
2336 }
2337
2338 /// Manually merge two transactions in the buffer's undo history.
2339 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2340 self.text.merge_transactions(transaction, destination);
2341 }
2342
2343 /// Waits for the buffer to receive operations with the given timestamps.
2344 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2345 &mut self,
2346 edit_ids: It,
2347 ) -> impl Future<Output = Result<()>> + use<It> {
2348 self.text.wait_for_edits(edit_ids)
2349 }
2350
2351 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2352 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2353 &mut self,
2354 anchors: It,
2355 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2356 self.text.wait_for_anchors(anchors)
2357 }
2358
2359 /// Waits for the buffer to receive operations up to the given version.
2360 pub fn wait_for_version(
2361 &mut self,
2362 version: clock::Global,
2363 ) -> impl Future<Output = Result<()>> + use<> {
2364 self.text.wait_for_version(version)
2365 }
2366
2367    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
2368    /// or [`Buffer::wait_for_version`] to resolve with an error.
2369 pub fn give_up_waiting(&mut self) {
2370 self.text.give_up_waiting();
2371 }
2372
2373 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2374 let mut rx = None;
2375 if !self.autoindent_requests.is_empty() {
2376 let channel = oneshot::channel();
2377 self.wait_for_autoindent_txs.push(channel.0);
2378 rx = Some(channel.1);
2379 }
2380 rx
2381 }
2382
2383    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2384 pub fn set_active_selections(
2385 &mut self,
2386 selections: Arc<[Selection<Anchor>]>,
2387 line_mode: bool,
2388 cursor_shape: CursorShape,
2389 cx: &mut Context<Self>,
2390 ) {
2391 let lamport_timestamp = self.text.lamport_clock.tick();
2392 self.remote_selections.insert(
2393 self.text.replica_id(),
2394 SelectionSet {
2395 selections: selections.clone(),
2396 lamport_timestamp,
2397 line_mode,
2398 cursor_shape,
2399 },
2400 );
2401 self.send_operation(
2402 Operation::UpdateSelections {
2403 selections,
2404 line_mode,
2405 lamport_timestamp,
2406 cursor_shape,
2407 },
2408 true,
2409 cx,
2410 );
2411 self.non_text_state_update_count += 1;
2412 cx.notify();
2413 }
2414
2415 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2416 /// this replica.
2417 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2418 if self
2419 .remote_selections
2420 .get(&self.text.replica_id())
2421 .is_none_or(|set| !set.selections.is_empty())
2422 {
2423 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2424 }
2425 }
2426
2427 pub fn set_agent_selections(
2428 &mut self,
2429 selections: Arc<[Selection<Anchor>]>,
2430 line_mode: bool,
2431 cursor_shape: CursorShape,
2432 cx: &mut Context<Self>,
2433 ) {
2434 let lamport_timestamp = self.text.lamport_clock.tick();
2435 self.remote_selections.insert(
2436 ReplicaId::AGENT,
2437 SelectionSet {
2438 selections,
2439 lamport_timestamp,
2440 line_mode,
2441 cursor_shape,
2442 },
2443 );
2444 self.non_text_state_update_count += 1;
2445 cx.notify();
2446 }
2447
2448 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2449 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2450 }
2451
2452 /// Replaces the buffer's entire text.
2453 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2454 where
2455 T: Into<Arc<str>>,
2456 {
2457 self.autoindent_requests.clear();
2458 self.edit([(0..self.len(), text)], None, cx)
2459 }
2460
2461 /// Appends the given text to the end of the buffer.
2462 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2463 where
2464 T: Into<Arc<str>>,
2465 {
2466 self.edit([(self.len()..self.len(), text)], None, cx)
2467 }
2468
2469 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2470 /// delete, and a string of text to insert at that location.
2471 ///
2472 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2473 /// request for the edited ranges, which will be processed when the buffer finishes
2474 /// parsing.
2475 ///
2476 /// Parsing takes place at the end of a transaction, and may compute synchronously
2477 /// or asynchronously, depending on the changes.
2478 pub fn edit<I, S, T>(
2479 &mut self,
2480 edits_iter: I,
2481 autoindent_mode: Option<AutoindentMode>,
2482 cx: &mut Context<Self>,
2483 ) -> Option<clock::Lamport>
2484 where
2485 I: IntoIterator<Item = (Range<S>, T)>,
2486 S: ToOffset,
2487 T: Into<Arc<str>>,
2488 {
2489 // Skip invalid edits and coalesce contiguous ones.
2490 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2491
2492 for (range, new_text) in edits_iter {
2493 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2494
2495 if range.start > range.end {
2496 mem::swap(&mut range.start, &mut range.end);
2497 }
2498 let new_text = new_text.into();
2499 if !new_text.is_empty() || !range.is_empty() {
2500 if let Some((prev_range, prev_text)) = edits.last_mut()
2501 && prev_range.end >= range.start
2502 {
2503 prev_range.end = cmp::max(prev_range.end, range.end);
2504 *prev_text = format!("{prev_text}{new_text}").into();
2505 } else {
2506 edits.push((range, new_text));
2507 }
2508 }
2509 }
2510 if edits.is_empty() {
2511 return None;
2512 }
2513
2514 self.start_transaction();
2515 self.pending_autoindent.take();
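        // Auto-indent only applies when the buffer has a language; capture a snapshot of
        // the pre-edit state so indentation can be computed relative to the old text.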
2516 let autoindent_request = autoindent_mode
2517 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2518
2519 let edit_operation = self.text.edit(edits.iter().cloned());
2520 let edit_id = edit_operation.timestamp();
2521
2522 if let Some((before_edit, mode)) = autoindent_request {
2523 let mut delta = 0isize;
2524 let mut previous_setting = None;
2525 let entries: Vec<_> = edits
2526 .into_iter()
2527 .enumerate()
2528 .zip(&edit_operation.as_edit().unwrap().new_text)
2529 .filter(|((_, (range, _)), _)| {
2530 let language = before_edit.language_at(range.start);
2531 let language_id = language.map(|l| l.id());
2532 if let Some((cached_language_id, auto_indent)) = previous_setting
2533 && cached_language_id == language_id
2534 {
2535 auto_indent
2536 } else {
2537 // The auto-indent setting is not present in editorconfigs, hence
2538 // we can avoid passing the file here.
2539 let auto_indent =
2540 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2541 previous_setting = Some((language_id, auto_indent));
2542 auto_indent
2543 }
2544 })
2545 .map(|((ix, (range, _)), new_text)| {
2546 let new_text_length = new_text.len();
2547 let old_start = range.start.to_point(&before_edit);
2548 let new_start = (delta + range.start as isize) as usize;
2549 let range_len = range.end - range.start;
2550 delta += new_text_length as isize - range_len as isize;
2551
2552 // Decide what range of the insertion to auto-indent, and whether
2553 // the first line of the insertion should be considered a newly-inserted line
2554 // or an edit to an existing line.
2555 let mut range_of_insertion_to_indent = 0..new_text_length;
2556 let mut first_line_is_new = true;
2557
2558 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2559 let old_line_end = before_edit.line_len(old_start.row);
2560
2561 if old_start.column > old_line_start {
2562 first_line_is_new = false;
2563 }
2564
2565 if !new_text.contains('\n')
2566 && (old_start.column + (range_len as u32) < old_line_end
2567 || old_line_end == old_line_start)
2568 {
2569 first_line_is_new = false;
2570 }
2571
2572 // When inserting text starting with a newline, avoid auto-indenting the
2573 // previous line.
2574 if new_text.starts_with('\n') {
2575 range_of_insertion_to_indent.start += 1;
2576 first_line_is_new = true;
2577 }
2578
2579 let mut original_indent_column = None;
2580 if let AutoindentMode::Block {
2581 original_indent_columns,
2582 } = &mode
2583 {
2584 original_indent_column = Some(if new_text.starts_with('\n') {
2585 indent_size_for_text(
2586 new_text[range_of_insertion_to_indent.clone()].chars(),
2587 )
2588 .len
2589 } else {
2590 original_indent_columns
2591 .get(ix)
2592 .copied()
2593 .flatten()
2594 .unwrap_or_else(|| {
2595 indent_size_for_text(
2596 new_text[range_of_insertion_to_indent.clone()].chars(),
2597 )
2598 .len
2599 })
2600 });
2601
2602 // Avoid auto-indenting the line after the edit.
2603 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2604 range_of_insertion_to_indent.end -= 1;
2605 }
2606 }
2607
2608 AutoindentRequestEntry {
2609 first_line_is_new,
2610 original_indent_column,
2611 indent_size: before_edit.language_indent_size_at(range.start, cx),
2612 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2613 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2614 }
2615 })
2616 .collect();
2617
2618 if !entries.is_empty() {
2619 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2620 before_edit,
2621 entries,
2622 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2623 ignore_empty_lines: false,
2624 }));
2625 }
2626 }
2627
2628 self.end_transaction(cx);
2629 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2630 Some(edit_id)
2631 }
2632
2633 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2634 self.was_changed();
2635
2636 if self.edits_since::<usize>(old_version).next().is_none() {
2637 return;
2638 }
2639
2640 self.reparse(cx, true);
2641 cx.emit(BufferEvent::Edited);
2642 if was_dirty != self.is_dirty() {
2643 cx.emit(BufferEvent::DirtyChanged);
2644 }
2645 cx.notify();
2646 }
2647
2648 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2649 where
2650 I: IntoIterator<Item = Range<T>>,
2651 T: ToOffset + Copy,
2652 {
2653 let before_edit = self.snapshot();
2654 let entries = ranges
2655 .into_iter()
2656 .map(|range| AutoindentRequestEntry {
2657 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2658 first_line_is_new: true,
2659 indent_size: before_edit.language_indent_size_at(range.start, cx),
2660 original_indent_column: None,
2661 })
2662 .collect();
2663 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2664 before_edit,
2665 entries,
2666 is_block_mode: false,
2667 ignore_empty_lines: true,
2668 }));
2669 self.request_autoindent(cx);
2670 }
2671
2672    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2673    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2674 pub fn insert_empty_line(
2675 &mut self,
2676 position: impl ToPoint,
2677 space_above: bool,
2678 space_below: bool,
2679 cx: &mut Context<Self>,
2680 ) -> Point {
2681 let mut position = position.to_point(self);
2682
2683 self.start_transaction();
2684
2685 self.edit(
2686 [(position..position, "\n")],
2687 Some(AutoindentMode::EachLine),
2688 cx,
2689 );
2690
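        // If the position was mid-line, the newline above split the line, so the
        // remainder of the original line now begins at the start of the next row.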
2691 if position.column > 0 {
2692 position += Point::new(1, 0);
2693 }
2694
2695 if !self.is_line_blank(position.row) {
2696 self.edit(
2697 [(position..position, "\n")],
2698 Some(AutoindentMode::EachLine),
2699 cx,
2700 );
2701 }
2702
2703 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2704 self.edit(
2705 [(position..position, "\n")],
2706 Some(AutoindentMode::EachLine),
2707 cx,
2708 );
2709 position.row += 1;
2710 }
2711
2712 if space_below
2713 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2714 {
2715 self.edit(
2716 [(position..position, "\n")],
2717 Some(AutoindentMode::EachLine),
2718 cx,
2719 );
2720 }
2721
2722 self.end_transaction(cx);
2723
2724 position
2725 }
2726
2727 /// Applies the given remote operations to the buffer.
2728 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2729 self.pending_autoindent.take();
2730 let was_dirty = self.is_dirty();
2731 let old_version = self.version.clone();
2732 let mut deferred_ops = Vec::new();
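        // Text operations can always be applied immediately; other operations are
        // deferred until the anchors they reference can be resolved.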
2733 let buffer_ops = ops
2734 .into_iter()
2735 .filter_map(|op| match op {
2736 Operation::Buffer(op) => Some(op),
2737 _ => {
2738 if self.can_apply_op(&op) {
2739 self.apply_op(op, cx);
2740 } else {
2741 deferred_ops.push(op);
2742 }
2743 None
2744 }
2745 })
2746 .collect::<Vec<_>>();
2747 for operation in buffer_ops.iter() {
2748 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2749 }
2750 self.text.apply_ops(buffer_ops);
2751 self.deferred_ops.insert(deferred_ops);
2752 self.flush_deferred_ops(cx);
2753 self.did_edit(&old_version, was_dirty, cx);
2754 // Notify independently of whether the buffer was edited as the operations could include a
2755 // selection update.
2756 cx.notify();
2757 }
2758
2759 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2760 let mut deferred_ops = Vec::new();
2761 for op in self.deferred_ops.drain().iter().cloned() {
2762 if self.can_apply_op(&op) {
2763 self.apply_op(op, cx);
2764 } else {
2765 deferred_ops.push(op);
2766 }
2767 }
2768 self.deferred_ops.insert(deferred_ops);
2769 }
2770
2771 pub fn has_deferred_ops(&self) -> bool {
2772 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2773 }
2774
2775 fn can_apply_op(&self, operation: &Operation) -> bool {
2776 match operation {
2777 Operation::Buffer(_) => {
2778 unreachable!("buffer operations should never be applied at this layer")
2779 }
2780 Operation::UpdateDiagnostics {
2781 diagnostics: diagnostic_set,
2782 ..
2783 } => diagnostic_set.iter().all(|diagnostic| {
2784 self.text.can_resolve(&diagnostic.range.start)
2785 && self.text.can_resolve(&diagnostic.range.end)
2786 }),
2787 Operation::UpdateSelections { selections, .. } => selections
2788 .iter()
2789 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2790 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2791 }
2792 }
2793
2794 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2795 match operation {
2796 Operation::Buffer(_) => {
2797 unreachable!("buffer operations should never be applied at this layer")
2798 }
2799 Operation::UpdateDiagnostics {
2800 server_id,
2801 diagnostics: diagnostic_set,
2802 lamport_timestamp,
2803 } => {
2804 let snapshot = self.snapshot();
2805 self.apply_diagnostic_update(
2806 server_id,
2807 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2808 lamport_timestamp,
2809 cx,
2810 );
2811 }
2812 Operation::UpdateSelections {
2813 selections,
2814 lamport_timestamp,
2815 line_mode,
2816 cursor_shape,
2817 } => {
2818 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2819 && set.lamport_timestamp > lamport_timestamp
2820 {
2821 return;
2822 }
2823
2824 self.remote_selections.insert(
2825 lamport_timestamp.replica_id,
2826 SelectionSet {
2827 selections,
2828 lamport_timestamp,
2829 line_mode,
2830 cursor_shape,
2831 },
2832 );
2833 self.text.lamport_clock.observe(lamport_timestamp);
2834 self.non_text_state_update_count += 1;
2835 }
2836 Operation::UpdateCompletionTriggers {
2837 triggers,
2838 lamport_timestamp,
2839 server_id,
2840 } => {
2841 if triggers.is_empty() {
2842 self.completion_triggers_per_language_server
2843 .remove(&server_id);
2844 self.completion_triggers = self
2845 .completion_triggers_per_language_server
2846 .values()
2847 .flat_map(|triggers| triggers.iter().cloned())
2848 .collect();
2849 } else {
2850 self.completion_triggers_per_language_server
2851 .insert(server_id, triggers.iter().cloned().collect());
2852 self.completion_triggers.extend(triggers);
2853 }
2854 self.text.lamport_clock.observe(lamport_timestamp);
2855 }
2856 Operation::UpdateLineEnding {
2857 line_ending,
2858 lamport_timestamp,
2859 } => {
2860 self.text.set_line_ending(line_ending);
2861 self.text.lamport_clock.observe(lamport_timestamp);
2862 }
2863 }
2864 }
2865
2866 fn apply_diagnostic_update(
2867 &mut self,
2868 server_id: LanguageServerId,
2869 diagnostics: DiagnosticSet,
2870 lamport_timestamp: clock::Lamport,
2871 cx: &mut Context<Self>,
2872 ) {
2873 if lamport_timestamp > self.diagnostics_timestamp {
2874 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2875 if diagnostics.is_empty() {
2876 if let Ok(ix) = ix {
2877 self.diagnostics.remove(ix);
2878 }
2879 } else {
2880 match ix {
2881 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2882 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2883 };
2884 }
2885 self.diagnostics_timestamp = lamport_timestamp;
2886 self.non_text_state_update_count += 1;
2887 self.text.lamport_clock.observe(lamport_timestamp);
2888 cx.notify();
2889 cx.emit(BufferEvent::DiagnosticsUpdated);
2890 }
2891 }
2892
2893 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2894 self.was_changed();
2895 cx.emit(BufferEvent::Operation {
2896 operation,
2897 is_local,
2898 });
2899 }
2900
2901 /// Removes the selections for a given peer.
2902 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2903 self.remote_selections.remove(&replica_id);
2904 cx.notify();
2905 }
2906
2907 /// Undoes the most recent transaction.
2908 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2909 let was_dirty = self.is_dirty();
2910 let old_version = self.version.clone();
2911
2912 if let Some((transaction_id, operation)) = self.text.undo() {
2913 self.send_operation(Operation::Buffer(operation), true, cx);
2914 self.did_edit(&old_version, was_dirty, cx);
2915 Some(transaction_id)
2916 } else {
2917 None
2918 }
2919 }
2920
2921 /// Manually undoes a specific transaction in the buffer's undo history.
2922 pub fn undo_transaction(
2923 &mut self,
2924 transaction_id: TransactionId,
2925 cx: &mut Context<Self>,
2926 ) -> bool {
2927 let was_dirty = self.is_dirty();
2928 let old_version = self.version.clone();
2929 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2930 self.send_operation(Operation::Buffer(operation), true, cx);
2931 self.did_edit(&old_version, was_dirty, cx);
2932 true
2933 } else {
2934 false
2935 }
2936 }
2937
2938 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2939 pub fn undo_to_transaction(
2940 &mut self,
2941 transaction_id: TransactionId,
2942 cx: &mut Context<Self>,
2943 ) -> bool {
2944 let was_dirty = self.is_dirty();
2945 let old_version = self.version.clone();
2946
2947 let operations = self.text.undo_to_transaction(transaction_id);
2948 let undone = !operations.is_empty();
2949 for operation in operations {
2950 self.send_operation(Operation::Buffer(operation), true, cx);
2951 }
2952 if undone {
2953 self.did_edit(&old_version, was_dirty, cx)
2954 }
2955 undone
2956 }
2957
2958 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2959 let was_dirty = self.is_dirty();
2960 let operation = self.text.undo_operations(counts);
2961 let old_version = self.version.clone();
2962 self.send_operation(Operation::Buffer(operation), true, cx);
2963 self.did_edit(&old_version, was_dirty, cx);
2964 }
2965
2966    /// Redoes the most recently undone transaction.
2967 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2968 let was_dirty = self.is_dirty();
2969 let old_version = self.version.clone();
2970
2971 if let Some((transaction_id, operation)) = self.text.redo() {
2972 self.send_operation(Operation::Buffer(operation), true, cx);
2973 self.did_edit(&old_version, was_dirty, cx);
2974 Some(transaction_id)
2975 } else {
2976 None
2977 }
2978 }
2979
2980    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2981 pub fn redo_to_transaction(
2982 &mut self,
2983 transaction_id: TransactionId,
2984 cx: &mut Context<Self>,
2985 ) -> bool {
2986 let was_dirty = self.is_dirty();
2987 let old_version = self.version.clone();
2988
2989 let operations = self.text.redo_to_transaction(transaction_id);
2990 let redone = !operations.is_empty();
2991 for operation in operations {
2992 self.send_operation(Operation::Buffer(operation), true, cx);
2993 }
2994 if redone {
2995 self.did_edit(&old_version, was_dirty, cx)
2996 }
2997 redone
2998 }
2999
3000 /// Override current completion triggers with the user-provided completion triggers.
3001 pub fn set_completion_triggers(
3002 &mut self,
3003 server_id: LanguageServerId,
3004 triggers: BTreeSet<String>,
3005 cx: &mut Context<Self>,
3006 ) {
3007 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
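        // An empty trigger set unregisters the server: rebuild the union from the
        // remaining servers. Otherwise, record the server's triggers and merge them
        // into the union.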
3008 if triggers.is_empty() {
3009 self.completion_triggers_per_language_server
3010 .remove(&server_id);
3011 self.completion_triggers = self
3012 .completion_triggers_per_language_server
3013 .values()
3014 .flat_map(|triggers| triggers.iter().cloned())
3015 .collect();
3016 } else {
3017 self.completion_triggers_per_language_server
3018 .insert(server_id, triggers.clone());
3019 self.completion_triggers.extend(triggers.iter().cloned());
3020 }
3021 self.send_operation(
3022 Operation::UpdateCompletionTriggers {
3023 triggers: triggers.into_iter().collect(),
3024 lamport_timestamp: self.completion_triggers_timestamp,
3025 server_id,
3026 },
3027 true,
3028 cx,
3029 );
3030 cx.notify();
3031 }
3032
3033 /// Returns a list of strings which trigger a completion menu for this language.
3034    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
3035 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3036 &self.completion_triggers
3037 }
3038
3039 /// Call this directly after performing edits to prevent the preview tab
3040 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3041 /// to return false until there are additional edits.
3042 pub fn refresh_preview(&mut self) {
3043 self.preview_version = self.version.clone();
3044 }
3045
3046 /// Whether we should preserve the preview status of a tab containing this buffer.
3047 pub fn preserve_preview(&self) -> bool {
3048 !self.has_edits_since(&self.preview_version)
3049 }
3050}
3051
3052#[doc(hidden)]
3053#[cfg(any(test, feature = "test-support"))]
3054impl Buffer {
3055 pub fn edit_via_marked_text(
3056 &mut self,
3057 marked_string: &str,
3058 autoindent_mode: Option<AutoindentMode>,
3059 cx: &mut Context<Self>,
3060 ) {
3061 let edits = self.edits_for_marked_text(marked_string);
3062 self.edit(edits, autoindent_mode, cx);
3063 }
3064
3065 pub fn set_group_interval(&mut self, group_interval: Duration) {
3066 self.text.set_group_interval(group_interval);
3067 }
3068
3069 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3070 where
3071 T: rand::Rng,
3072 {
3073 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3074 let mut last_end = None;
3075 for _ in 0..old_range_count {
3076 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3077 break;
3078 }
3079
3080 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3081 let mut range = self.random_byte_range(new_start, rng);
3082 if rng.random_bool(0.2) {
3083 mem::swap(&mut range.start, &mut range.end);
3084 }
3085 last_end = Some(range.end);
3086
3087 let new_text_len = rng.random_range(0..10);
3088 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3089 new_text = new_text.to_uppercase();
3090
3091 edits.push((range, new_text));
3092 }
3093 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3094 self.edit(edits, None, cx);
3095 }
3096
3097 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3098 let was_dirty = self.is_dirty();
3099 let old_version = self.version.clone();
3100
3101 let ops = self.text.randomly_undo_redo(rng);
3102 if !ops.is_empty() {
3103 for op in ops {
3104 self.send_operation(Operation::Buffer(op), true, cx);
3105 self.did_edit(&old_version, was_dirty, cx);
3106 }
3107 }
3108 }
3109}
3110
3111impl EventEmitter<BufferEvent> for Buffer {}
3112
3113impl Deref for Buffer {
3114 type Target = TextBuffer;
3115
3116 fn deref(&self) -> &Self::Target {
3117 &self.text
3118 }
3119}
3120
3121impl BufferSnapshot {
3122 /// Returns [`IndentSize`] for a given line that respects user settings and
3123 /// language preferences.
3124 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3125 indent_size_for_line(self, row)
3126 }
3127
3128 /// Returns [`IndentSize`] for a given position that respects user settings
3129 /// and language preferences.
3130 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3131 let settings = language_settings(
3132 self.language_at(position).map(|l| l.name()),
3133 self.file(),
3134 cx,
3135 );
3136 if settings.hard_tabs {
3137 IndentSize::tab()
3138 } else {
3139 IndentSize::spaces(settings.tab_size.get())
3140 }
3141 }
3142
3143 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3144 /// is passed in as `single_indent_size`.
3145 pub fn suggested_indents(
3146 &self,
3147 rows: impl Iterator<Item = u32>,
3148 single_indent_size: IndentSize,
3149 ) -> BTreeMap<u32, IndentSize> {
3150 let mut result = BTreeMap::new();
3151
3152 for row_range in contiguous_ranges(rows, 10) {
3153 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3154 Some(suggestions) => suggestions,
3155 _ => break,
3156 };
3157
3158 for (row, suggestion) in row_range.zip(suggestions) {
3159 let indent_size = if let Some(suggestion) = suggestion {
3160 result
3161 .get(&suggestion.basis_row)
3162 .copied()
3163 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3164 .with_delta(suggestion.delta, single_indent_size)
3165 } else {
3166 self.indent_size_for_line(row)
3167 };
3168
3169 result.insert(row, indent_size);
3170 }
3171 }
3172
3173 result
3174 }
3175
3176 fn suggest_autoindents(
3177 &self,
3178 row_range: Range<u32>,
3179 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3180 let config = &self.language.as_ref()?.config;
3181 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3182
3183 #[derive(Debug, Clone)]
3184 struct StartPosition {
3185 start: Point,
3186 suffix: SharedString,
3187 }
3188
3189 // Find the suggested indentation ranges based on the syntax tree.
3190 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3191 let end = Point::new(row_range.end, 0);
3192 let range = (start..end).to_offset(&self.text);
3193 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3194 Some(&grammar.indents_config.as_ref()?.query)
3195 });
3196 let indent_configs = matches
3197 .grammars()
3198 .iter()
3199 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3200 .collect::<Vec<_>>();
3201
3202 let mut indent_ranges = Vec::<Range<Point>>::new();
3203 let mut start_positions = Vec::<StartPosition>::new();
3204 let mut outdent_positions = Vec::<Point>::new();
3205 while let Some(mat) = matches.peek() {
3206 let mut start: Option<Point> = None;
3207 let mut end: Option<Point> = None;
3208
3209 let config = indent_configs[mat.grammar_index];
3210 for capture in mat.captures {
3211 if capture.index == config.indent_capture_ix {
3212 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3213 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3214 } else if Some(capture.index) == config.start_capture_ix {
3215 start = Some(Point::from_ts_point(capture.node.end_position()));
3216 } else if Some(capture.index) == config.end_capture_ix {
3217 end = Some(Point::from_ts_point(capture.node.start_position()));
3218 } else if Some(capture.index) == config.outdent_capture_ix {
3219 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3220 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3221 start_positions.push(StartPosition {
3222 start: Point::from_ts_point(capture.node.start_position()),
3223 suffix: suffix.clone(),
3224 });
3225 }
3226 }
3227
3228 matches.advance();
3229 if let Some((start, end)) = start.zip(end) {
3230 if start.row == end.row {
3231 continue;
3232 }
3233 let range = start..end;
3234 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3235 Err(ix) => indent_ranges.insert(ix, range),
3236 Ok(ix) => {
3237 let prev_range = &mut indent_ranges[ix];
3238 prev_range.end = prev_range.end.max(range.end);
3239 }
3240 }
3241 }
3242 }
3243
3244 let mut error_ranges = Vec::<Range<Point>>::new();
3245 let mut matches = self
3246 .syntax
3247 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3248 while let Some(mat) = matches.peek() {
3249 let node = mat.captures[0].node;
3250 let start = Point::from_ts_point(node.start_position());
3251 let end = Point::from_ts_point(node.end_position());
3252 let range = start..end;
3253 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3254 Ok(ix) | Err(ix) => ix,
3255 };
3256 let mut end_ix = ix;
3257 while let Some(existing_range) = error_ranges.get(end_ix) {
3258 if existing_range.end < end {
3259 end_ix += 1;
3260 } else {
3261 break;
3262 }
3263 }
3264 error_ranges.splice(ix..end_ix, [range]);
3265 matches.advance();
3266 }
3267
3268 outdent_positions.sort();
3269 for outdent_position in outdent_positions {
3270            // Find the innermost indent range containing this outdent_position
3271            // and set its end to the outdent position.
3272 if let Some(range_to_truncate) = indent_ranges
3273 .iter_mut()
3274 .filter(|indent_range| indent_range.contains(&outdent_position))
3275 .next_back()
3276 {
3277 range_to_truncate.end = outdent_position;
3278 }
3279 }
3280
3281 start_positions.sort_by_key(|b| b.start);
3282
3283        // Find the suggested indentation increases and decreases based on regexes.
3284 let mut regex_outdent_map = HashMap::default();
3285 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3286 let mut start_positions_iter = start_positions.iter().peekable();
3287
3288 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3289 self.for_each_line(
3290 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3291 ..Point::new(row_range.end, 0),
3292 |row, line| {
3293 if config
3294 .decrease_indent_pattern
3295 .as_ref()
3296 .is_some_and(|regex| regex.is_match(line))
3297 {
3298 indent_change_rows.push((row, Ordering::Less));
3299 }
3300 if config
3301 .increase_indent_pattern
3302 .as_ref()
3303 .is_some_and(|regex| regex.is_match(line))
3304 {
3305 indent_change_rows.push((row + 1, Ordering::Greater));
3306 }
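                // Record the most recent start position seen for each suffix on earlier
                // rows; the decrease-indent rules below use them as outdent targets.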
3307 while let Some(pos) = start_positions_iter.peek() {
3308 if pos.start.row < row {
3309 let pos = start_positions_iter.next().unwrap();
3310 last_seen_suffix
3311 .entry(pos.suffix.to_string())
3312 .or_default()
3313 .push(pos.start);
3314 } else {
3315 break;
3316 }
3317 }
3318 for rule in &config.decrease_indent_patterns {
3319 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3320 let row_start_column = self.indent_size_for_line(row).len;
3321 let basis_row = rule
3322 .valid_after
3323 .iter()
3324 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3325 .flatten()
3326 .filter(|start_point| start_point.column <= row_start_column)
3327 .max_by_key(|start_point| start_point.row);
3328 if let Some(outdent_to_row) = basis_row {
3329 regex_outdent_map.insert(row, outdent_to_row.row);
3330 }
3331 break;
3332 }
3333 }
3334 },
3335 );
3336
3337 let mut indent_changes = indent_change_rows.into_iter().peekable();
3338 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3339 prev_non_blank_row.unwrap_or(0)
3340 } else {
3341 row_range.start.saturating_sub(1)
3342 };
3343
3344 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3345 Some(row_range.map(move |row| {
3346 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3347
3348 let mut indent_from_prev_row = false;
3349 let mut outdent_from_prev_row = false;
3350 let mut outdent_to_row = u32::MAX;
3351 let mut from_regex = false;
3352
3353 while let Some((indent_row, delta)) = indent_changes.peek() {
3354 match indent_row.cmp(&row) {
3355 Ordering::Equal => match delta {
3356 Ordering::Less => {
3357 from_regex = true;
3358 outdent_from_prev_row = true
3359 }
3360 Ordering::Greater => {
3361 indent_from_prev_row = true;
3362 from_regex = true
3363 }
3364 _ => {}
3365 },
3366
3367 Ordering::Greater => break,
3368 Ordering::Less => {}
3369 }
3370
3371 indent_changes.next();
3372 }
3373
3374 for range in &indent_ranges {
3375 if range.start.row >= row {
3376 break;
3377 }
3378 if range.start.row == prev_row && range.end > row_start {
3379 indent_from_prev_row = true;
3380 }
3381 if range.end > prev_row_start && range.end <= row_start {
3382 outdent_to_row = outdent_to_row.min(range.start.row);
3383 }
3384 }
3385
3386 if let Some(basis_row) = regex_outdent_map.get(&row) {
3387 indent_from_prev_row = false;
3388 outdent_to_row = *basis_row;
3389 from_regex = true;
3390 }
3391
3392 let within_error = error_ranges
3393 .iter()
3394 .any(|e| e.start.row < row && e.end > row_start);
3395
3396 let suggestion = if outdent_to_row == prev_row
3397 || (outdent_from_prev_row && indent_from_prev_row)
3398 {
3399 Some(IndentSuggestion {
3400 basis_row: prev_row,
3401 delta: Ordering::Equal,
3402 within_error: within_error && !from_regex,
3403 })
3404 } else if indent_from_prev_row {
3405 Some(IndentSuggestion {
3406 basis_row: prev_row,
3407 delta: Ordering::Greater,
3408 within_error: within_error && !from_regex,
3409 })
3410 } else if outdent_to_row < prev_row {
3411 Some(IndentSuggestion {
3412 basis_row: outdent_to_row,
3413 delta: Ordering::Equal,
3414 within_error: within_error && !from_regex,
3415 })
3416 } else if outdent_from_prev_row {
3417 Some(IndentSuggestion {
3418 basis_row: prev_row,
3419 delta: Ordering::Less,
3420 within_error: within_error && !from_regex,
3421 })
3422 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3423 {
3424 Some(IndentSuggestion {
3425 basis_row: prev_row,
3426 delta: Ordering::Equal,
3427 within_error: within_error && !from_regex,
3428 })
3429 } else {
3430 None
3431 };
3432
3433 prev_row = row;
3434 prev_row_start = row_start;
3435 suggestion
3436 }))
3437 }
3438
3439 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3440 while row > 0 {
3441 row -= 1;
3442 if !self.is_line_blank(row) {
3443 return Some(row);
3444 }
3445 }
3446 None
3447 }
3448
3449 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3450 let captures = self.syntax.captures(range, &self.text, |grammar| {
3451 grammar
3452 .highlights_config
3453 .as_ref()
3454 .map(|config| &config.query)
3455 });
3456 let highlight_maps = captures
3457 .grammars()
3458 .iter()
3459 .map(|grammar| grammar.highlight_map())
3460 .collect();
3461 (captures, highlight_maps)
3462 }
3463
3464 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3465 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3466 /// returned in chunks where each chunk has a single syntax highlighting style and
3467 /// diagnostic status.
3468 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3469 let range = range.start.to_offset(self)..range.end.to_offset(self);
3470
3471 let mut syntax = None;
3472 if language_aware {
3473 syntax = Some(self.get_highlights(range.clone()));
3474 }
3475 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3476 let diagnostics = language_aware;
3477 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3478 }
3479
3480 pub fn highlighted_text_for_range<T: ToOffset>(
3481 &self,
3482 range: Range<T>,
3483 override_style: Option<HighlightStyle>,
3484 syntax_theme: &SyntaxTheme,
3485 ) -> HighlightedText {
3486 HighlightedText::from_buffer_range(
3487 range,
3488 &self.text,
3489 &self.syntax,
3490 override_style,
3491 syntax_theme,
3492 )
3493 }
3494
3495 /// Invokes the given callback for each line of text in the given range of the buffer.
3496 /// Uses a callback to avoid allocating a new string for each line.
3497 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3498 let mut line = String::new();
3499 let mut row = range.start.row;
3500 for chunk in self
3501 .as_rope()
3502 .chunks_in_range(range.to_offset(self))
3503 .chain(["\n"])
3504 {
3505 for (newline_ix, text) in chunk.split('\n').enumerate() {
3506 if newline_ix > 0 {
3507 callback(row, &line);
3508 row += 1;
3509 line.clear();
3510 }
3511 line.push_str(text);
3512 }
3513 }
3514 }
3515
3516 /// Iterates over every [`SyntaxLayer`] in the buffer.
3517 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3518 self.syntax_layers_for_range(0..self.len(), true)
3519 }
3520
3521 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3522 let offset = position.to_offset(self);
3523 self.syntax_layers_for_range(offset..offset, false)
3524 .filter(|l| {
3525 if let Some(ranges) = l.included_sub_ranges {
3526 ranges.iter().any(|range| {
3527 let start = range.start.to_offset(self);
3528 start <= offset && {
3529 let end = range.end.to_offset(self);
3530 offset < end
3531 }
3532 })
3533 } else {
3534 l.node().start_byte() <= offset && l.node().end_byte() > offset
3535 }
3536 })
3537 .last()
3538 }
3539
3540 pub fn syntax_layers_for_range<D: ToOffset>(
3541 &self,
3542 range: Range<D>,
3543 include_hidden: bool,
3544 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3545 self.syntax
3546 .layers_for_range(range, &self.text, include_hidden)
3547 }
3548
3549 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3550 &self,
3551 range: Range<D>,
3552 ) -> Option<SyntaxLayer<'_>> {
3553 let range = range.to_offset(self);
3554 self.syntax
3555 .layers_for_range(range, &self.text, false)
3556 .max_by(|a, b| {
3557 if a.depth != b.depth {
3558 a.depth.cmp(&b.depth)
3559 } else if a.offset.0 != b.offset.0 {
3560 a.offset.0.cmp(&b.offset.0)
3561 } else {
3562 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3563 }
3564 })
3565 }
3566
3567 /// Returns the main [`Language`].
3568 pub fn language(&self) -> Option<&Arc<Language>> {
3569 self.language.as_ref()
3570 }
3571
3572 /// Returns the [`Language`] at the given location.
3573 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3574 self.syntax_layer_at(position)
3575 .map(|info| info.language)
3576 .or(self.language.as_ref())
3577 }
3578
3579 /// Returns the settings for the language at the given location.
3580 pub fn settings_at<'a, D: ToOffset>(
3581 &'a self,
3582 position: D,
3583 cx: &'a App,
3584 ) -> Cow<'a, LanguageSettings> {
3585 language_settings(
3586 self.language_at(position).map(|l| l.name()),
3587 self.file.as_ref(),
3588 cx,
3589 )
3590 }
3591
3592 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3593 CharClassifier::new(self.language_scope_at(point))
3594 }
3595
3596 /// Returns the [`LanguageScope`] at the given location.
3597 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3598 let offset = position.to_offset(self);
3599 let mut scope = None;
3600 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3601
3602 // Use the layer that has the smallest node intersecting the given point.
3603 for layer in self
3604 .syntax
3605 .layers_for_range(offset..offset, &self.text, false)
3606 {
3607 let mut cursor = layer.node().walk();
3608
3609 let mut range = None;
3610 loop {
3611 let child_range = cursor.node().byte_range();
3612 if !child_range.contains(&offset) {
3613 break;
3614 }
3615
3616 range = Some(child_range);
3617 if cursor.goto_first_child_for_byte(offset).is_none() {
3618 break;
3619 }
3620 }
3621
3622 if let Some(range) = range
3623 && smallest_range_and_depth.as_ref().is_none_or(
3624 |(smallest_range, smallest_range_depth)| {
3625 if layer.depth > *smallest_range_depth {
3626 true
3627 } else if layer.depth == *smallest_range_depth {
3628 range.len() < smallest_range.len()
3629 } else {
3630 false
3631 }
3632 },
3633 )
3634 {
3635 smallest_range_and_depth = Some((range, layer.depth));
3636 scope = Some(LanguageScope {
3637 language: layer.language.clone(),
3638 override_id: layer.override_id(offset, &self.text),
3639 });
3640 }
3641 }
3642
3643 scope.or_else(|| {
3644 self.language.clone().map(|language| LanguageScope {
3645 language,
3646 override_id: None,
3647 })
3648 })
3649 }
3650
3651 /// Returns a tuple of the range and character kind of the word
3652 /// surrounding the given position.
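///
/// A minimal sketch (the `snapshot` binding, buffer contents, and offset are illustrative
/// assumptions):
///
/// ```ignore
/// // With buffer text "hello world" and an offset inside "world", this yields the byte
/// // range of "world" together with its `CharKind`.
/// let (range, kind) = snapshot.surrounding_word(8, None);
/// assert_eq!(range, 6..11);
/// ```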
3653 pub fn surrounding_word<T: ToOffset>(
3654 &self,
3655 start: T,
3656 scope_context: Option<CharScopeContext>,
3657 ) -> (Range<usize>, Option<CharKind>) {
3658 let mut start = start.to_offset(self);
3659 let mut end = start;
3660 let mut next_chars = self.chars_at(start).take(128).peekable();
3661 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3662
3663 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3664 let word_kind = cmp::max(
3665 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3666 next_chars.peek().copied().map(|c| classifier.kind(c)),
3667 );
3668
3669 for ch in prev_chars {
3670 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3671 start -= ch.len_utf8();
3672 } else {
3673 break;
3674 }
3675 }
3676
3677 for ch in next_chars {
3678 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3679 end += ch.len_utf8();
3680 } else {
3681 break;
3682 }
3683 }
3684
3685 (start..end, word_kind)
3686 }
3687
3688 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3689 /// range. When `require_larger` is true, the node found must be larger than the query range.
3690 ///
3691 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3692 /// be moved to the root of the tree.
3693 fn goto_node_enclosing_range(
3694 cursor: &mut tree_sitter::TreeCursor,
3695 query_range: &Range<usize>,
3696 require_larger: bool,
3697 ) -> bool {
3698 let mut ascending = false;
3699 loop {
3700 let mut range = cursor.node().byte_range();
3701 if query_range.is_empty() {
3702 // When the query range is empty and the current node starts after it, move to the
3703 // previous sibling to find the containing node.
3704 if range.start > query_range.start {
3705 cursor.goto_previous_sibling();
3706 range = cursor.node().byte_range();
3707 }
3708 } else {
3709 // When the query range is non-empty and the current node ends exactly at the start,
3710 // move to the next sibling to find a node that extends beyond the start.
3711 if range.end == query_range.start {
3712 cursor.goto_next_sibling();
3713 range = cursor.node().byte_range();
3714 }
3715 }
3716
3717 let encloses = range.contains_inclusive(query_range)
3718 && (!require_larger || range.len() > query_range.len());
3719 if !encloses {
3720 ascending = true;
3721 if !cursor.goto_parent() {
3722 return false;
3723 }
3724 continue;
3725 } else if ascending {
3726 return true;
3727 }
3728
3729 // Descend into the current node.
3730 if cursor
3731 .goto_first_child_for_byte(query_range.start)
3732 .is_none()
3733 {
3734 return true;
3735 }
3736 }
3737 }
3738
3739 pub fn syntax_ancestor<'a, T: ToOffset>(
3740 &'a self,
3741 range: Range<T>,
3742 ) -> Option<tree_sitter::Node<'a>> {
3743 let range = range.start.to_offset(self)..range.end.to_offset(self);
3744 let mut result: Option<tree_sitter::Node<'a>> = None;
3745 for layer in self
3746 .syntax
3747 .layers_for_range(range.clone(), &self.text, true)
3748 {
3749 let mut cursor = layer.node().walk();
3750
3751 // Find the node that both contains the range and is larger than it.
3752 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3753 continue;
3754 }
3755
3756 let left_node = cursor.node();
3757 let mut layer_result = left_node;
3758
3759 // For an empty range, try to find another node immediately to the right of the range.
3760 if left_node.end_byte() == range.start {
3761 let mut right_node = None;
3762 while !cursor.goto_next_sibling() {
3763 if !cursor.goto_parent() {
3764 break;
3765 }
3766 }
3767
3768 while cursor.node().start_byte() == range.start {
3769 right_node = Some(cursor.node());
3770 if !cursor.goto_first_child() {
3771 break;
3772 }
3773 }
3774
3775 // If there is a candidate node on both sides of the (empty) range, then
3776 // decide between the two by favoring a named node over an anonymous token.
3777 // If both nodes are the same in that regard, favor the right one.
3778 if let Some(right_node) = right_node
3779 && (right_node.is_named() || !left_node.is_named())
3780 {
3781 layer_result = right_node;
3782 }
3783 }
3784
3785 if let Some(previous_result) = &result
3786 && previous_result.byte_range().len() < layer_result.byte_range().len()
3787 {
3788 continue;
3789 }
3790 result = Some(layer_result);
3791 }
3792
3793 result
3794 }
3795
3796 /// Find the previous sibling syntax node at the given range.
3797 ///
3798 /// This function locates the syntax node that precedes the node containing
3799 /// the given range. It searches hierarchically by:
3800 /// 1. Finding the node that contains the given range
3801 /// 2. Looking for the previous sibling at the same tree level
3802 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3803 ///
3804 /// Returns `None` if there is no previous sibling at any ancestor level.
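///
/// A minimal sketch (the `snapshot` binding, buffer contents, and offsets are illustrative
/// assumptions):
///
/// ```ignore
/// // In a Rust buffer containing "fn a() {}\nfn b() {}", querying a range inside the
/// // second item may yield the node for the first item.
/// if let Some(node) = snapshot.syntax_prev_sibling(12..13) {
///     println!("previous sibling kind: {}", node.kind());
/// }
/// ```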
3805 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3806 &'a self,
3807 range: Range<T>,
3808 ) -> Option<tree_sitter::Node<'a>> {
3809 let range = range.start.to_offset(self)..range.end.to_offset(self);
3810 let mut result: Option<tree_sitter::Node<'a>> = None;
3811
3812 for layer in self
3813 .syntax
3814 .layers_for_range(range.clone(), &self.text, true)
3815 {
3816 let mut cursor = layer.node().walk();
3817
3818 // Find the node that contains the range
3819 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3820 continue;
3821 }
3822
3823 // Look for the previous sibling, moving up ancestor levels if needed
3824 loop {
3825 if cursor.goto_previous_sibling() {
3826 let layer_result = cursor.node();
3827
3828 if let Some(previous_result) = &result {
3829 if previous_result.byte_range().end < layer_result.byte_range().end {
3830 continue;
3831 }
3832 }
3833 result = Some(layer_result);
3834 break;
3835 }
3836
3837 // No sibling found at this level, try moving up to parent
3838 if !cursor.goto_parent() {
3839 break;
3840 }
3841 }
3842 }
3843
3844 result
3845 }
3846
3847 /// Find the next sibling syntax node at the given range.
3848 ///
3849 /// This function locates the syntax node that follows the node containing
3850 /// the given range. It searches hierarchically by:
3851 /// 1. Finding the node that contains the given range
3852 /// 2. Looking for the next sibling at the same tree level
3853 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3854 ///
3855 /// Returns `None` if there is no next sibling at any ancestor level.
3856 pub fn syntax_next_sibling<'a, T: ToOffset>(
3857 &'a self,
3858 range: Range<T>,
3859 ) -> Option<tree_sitter::Node<'a>> {
3860 let range = range.start.to_offset(self)..range.end.to_offset(self);
3861 let mut result: Option<tree_sitter::Node<'a>> = None;
3862
3863 for layer in self
3864 .syntax
3865 .layers_for_range(range.clone(), &self.text, true)
3866 {
3867 let mut cursor = layer.node().walk();
3868
3869 // Find the node that contains the range
3870 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3871 continue;
3872 }
3873
3874 // Look for the next sibling, moving up ancestor levels if needed
3875 loop {
3876 if cursor.goto_next_sibling() {
3877 let layer_result = cursor.node();
3878
3879 if let Some(previous_result) = &result {
3880 if previous_result.byte_range().start > layer_result.byte_range().start {
3881 continue;
3882 }
3883 }
3884 result = Some(layer_result);
3885 break;
3886 }
3887
3888 // No sibling found at this level, try moving up to parent
3889 if !cursor.goto_parent() {
3890 break;
3891 }
3892 }
3893 }
3894
3895 result
3896 }
3897
3898 /// Returns the root syntax node within the given row
3899 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3900 let start_offset = position.to_offset(self);
3901
3902 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3903
3904 let layer = self
3905 .syntax
3906 .layers_for_range(start_offset..start_offset, &self.text, true)
3907 .next()?;
3908
3909 let mut cursor = layer.node().walk();
3910
3911 // Descend to the first leaf that touches the start of the range.
3912 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3913 if cursor.node().end_byte() == start_offset {
3914 cursor.goto_next_sibling();
3915 }
3916 }
3917
3918 // Ascend to the root node within the same row.
3919 while cursor.goto_parent() {
3920 if cursor.node().start_position().row != row {
3921 break;
3922 }
3923 }
3924
3925 Some(cursor.node())
3926 }
3927
3928 /// Returns the outline for the buffer.
3929 ///
3930 /// This method allows passing an optional [`SyntaxTheme`] to
3931 /// syntax-highlight the returned symbols.
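///
/// A minimal sketch (the `snapshot` binding is illustrative, and reading `items` assumes
/// that field is publicly accessible):
///
/// ```ignore
/// let outline = snapshot.outline(None);
/// for item in &outline.items {
///     println!("{}{}", "  ".repeat(item.depth), item.text);
/// }
/// ```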
3932 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3933 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3934 }
3935
3936 /// Returns all the symbols that contain the given position.
3937 ///
3938 /// This method allows passing an optional [`SyntaxTheme`] to
3939 /// syntax-highlight the returned symbols.
3940 pub fn symbols_containing<T: ToOffset>(
3941 &self,
3942 position: T,
3943 theme: Option<&SyntaxTheme>,
3944 ) -> Vec<OutlineItem<Anchor>> {
3945 let position = position.to_offset(self);
3946 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3947 let end = self.clip_offset(position + 1, Bias::Right);
3948 let mut items = self.outline_items_containing(start..end, false, theme);
3949 let mut prev_depth = None;
3950 items.retain(|item| {
3951 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3952 prev_depth = Some(item.depth);
3953 result
3954 });
3955 items
3956 }
3957
3958 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3959 let range = range.to_offset(self);
3960 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3961 grammar.outline_config.as_ref().map(|c| &c.query)
3962 });
3963 let configs = matches
3964 .grammars()
3965 .iter()
3966 .map(|g| g.outline_config.as_ref().unwrap())
3967 .collect::<Vec<_>>();
3968
3969 while let Some(mat) = matches.peek() {
3970 let config = &configs[mat.grammar_index];
3971 let containing_item_node = maybe!({
3972 let item_node = mat.captures.iter().find_map(|cap| {
3973 if cap.index == config.item_capture_ix {
3974 Some(cap.node)
3975 } else {
3976 None
3977 }
3978 })?;
3979
3980 let item_byte_range = item_node.byte_range();
3981 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3982 None
3983 } else {
3984 Some(item_node)
3985 }
3986 });
3987
3988 if let Some(item_node) = containing_item_node {
3989 return Some(
3990 Point::from_ts_point(item_node.start_position())
3991 ..Point::from_ts_point(item_node.end_position()),
3992 );
3993 }
3994
3995 matches.advance();
3996 }
3997 None
3998 }
3999
4000 pub fn outline_items_containing<T: ToOffset>(
4001 &self,
4002 range: Range<T>,
4003 include_extra_context: bool,
4004 theme: Option<&SyntaxTheme>,
4005 ) -> Vec<OutlineItem<Anchor>> {
4006 self.outline_items_containing_internal(
4007 range,
4008 include_extra_context,
4009 theme,
4010 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4011 )
4012 }
4013
4014 pub fn outline_items_as_points_containing<T: ToOffset>(
4015 &self,
4016 range: Range<T>,
4017 include_extra_context: bool,
4018 theme: Option<&SyntaxTheme>,
4019 ) -> Vec<OutlineItem<Point>> {
4020 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4021 range
4022 })
4023 }
4024
4025 fn outline_items_containing_internal<T: ToOffset, U>(
4026 &self,
4027 range: Range<T>,
4028 include_extra_context: bool,
4029 theme: Option<&SyntaxTheme>,
4030 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4031 ) -> Vec<OutlineItem<U>> {
4032 let range = range.to_offset(self);
4033 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4034 grammar.outline_config.as_ref().map(|c| &c.query)
4035 });
4036
4037 let mut items = Vec::new();
4038 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4039 while let Some(mat) = matches.peek() {
4040 let config = matches.grammars()[mat.grammar_index]
4041 .outline_config
4042 .as_ref()
4043 .unwrap();
4044 if let Some(item) =
4045 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4046 {
4047 items.push(item);
4048 } else if let Some(capture) = mat
4049 .captures
4050 .iter()
4051 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4052 {
4053 let capture_range = capture.node.start_position()..capture.node.end_position();
4054 let mut capture_row_range =
4055 capture_range.start.row as u32..capture_range.end.row as u32;
4056 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4057 {
4058 capture_row_range.end -= 1;
4059 }
4060 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4061 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4062 last_row_range.end = capture_row_range.end;
4063 } else {
4064 annotation_row_ranges.push(capture_row_range);
4065 }
4066 } else {
4067 annotation_row_ranges.push(capture_row_range);
4068 }
4069 }
4070 matches.advance();
4071 }
4072
4073 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4074
4075 // Assign depths based on containment relationships and convert to anchors.
4076 let mut item_ends_stack = Vec::<Point>::new();
4077 let mut anchor_items = Vec::new();
4078 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4079 for item in items {
4080 while let Some(last_end) = item_ends_stack.last().copied() {
4081 if last_end < item.range.end {
4082 item_ends_stack.pop();
4083 } else {
4084 break;
4085 }
4086 }
4087
4088 let mut annotation_row_range = None;
4089 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4090 let row_preceding_item = item.range.start.row.saturating_sub(1);
4091 if next_annotation_row_range.end < row_preceding_item {
4092 annotation_row_ranges.next();
4093 } else {
4094 if next_annotation_row_range.end == row_preceding_item {
4095 annotation_row_range = Some(next_annotation_row_range.clone());
4096 annotation_row_ranges.next();
4097 }
4098 break;
4099 }
4100 }
4101
4102 anchor_items.push(OutlineItem {
4103 depth: item_ends_stack.len(),
4104 range: range_callback(self, item.range.clone()),
4105 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4106 text: item.text,
4107 highlight_ranges: item.highlight_ranges,
4108 name_ranges: item.name_ranges,
4109 body_range: item.body_range.map(|r| range_callback(self, r)),
4110 annotation_range: annotation_row_range.map(|annotation_range| {
4111 let point_range = Point::new(annotation_range.start, 0)
4112 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4113 range_callback(self, point_range)
4114 }),
4115 });
4116 item_ends_stack.push(item.range.end);
4117 }
4118
4119 anchor_items
4120 }
4121
4122 fn next_outline_item(
4123 &self,
4124 config: &OutlineConfig,
4125 mat: &SyntaxMapMatch,
4126 range: &Range<usize>,
4127 include_extra_context: bool,
4128 theme: Option<&SyntaxTheme>,
4129 ) -> Option<OutlineItem<Point>> {
4130 let item_node = mat.captures.iter().find_map(|cap| {
4131 if cap.index == config.item_capture_ix {
4132 Some(cap.node)
4133 } else {
4134 None
4135 }
4136 })?;
4137
4138 let item_byte_range = item_node.byte_range();
4139 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4140 return None;
4141 }
4142 let item_point_range = Point::from_ts_point(item_node.start_position())
4143 ..Point::from_ts_point(item_node.end_position());
4144
4145 let mut open_point = None;
4146 let mut close_point = None;
4147
4148 let mut buffer_ranges = Vec::new();
4149 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4150 let mut range = node.start_byte()..node.end_byte();
4151 let start = node.start_position();
4152 if node.end_position().row > start.row {
4153 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4154 }
4155
4156 if !range.is_empty() {
4157 buffer_ranges.push((range, node_is_name));
4158 }
4159 };
4160
4161 for capture in mat.captures {
4162 if capture.index == config.name_capture_ix {
4163 add_to_buffer_ranges(capture.node, true);
4164 } else if Some(capture.index) == config.context_capture_ix
4165 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4166 {
4167 add_to_buffer_ranges(capture.node, false);
4168 } else {
4169 if Some(capture.index) == config.open_capture_ix {
4170 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4171 } else if Some(capture.index) == config.close_capture_ix {
4172 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4173 }
4174 }
4175 }
4176
4177 if buffer_ranges.is_empty() {
4178 return None;
4179 }
4180 let source_range_for_text =
4181 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4182
4183 let mut text = String::new();
4184 let mut highlight_ranges = Vec::new();
4185 let mut name_ranges = Vec::new();
4186 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4187 let mut last_buffer_range_end = 0;
4188 for (buffer_range, is_name) in buffer_ranges {
4189 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4190 if space_added {
4191 text.push(' ');
4192 }
4193 let before_append_len = text.len();
4194 let mut offset = buffer_range.start;
4195 chunks.seek(buffer_range.clone());
4196 for mut chunk in chunks.by_ref() {
4197 if chunk.text.len() > buffer_range.end - offset {
4198 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4199 offset = buffer_range.end;
4200 } else {
4201 offset += chunk.text.len();
4202 }
4203 let style = chunk
4204 .syntax_highlight_id
4205 .zip(theme)
4206 .and_then(|(highlight, theme)| highlight.style(theme));
4207 if let Some(style) = style {
4208 let start = text.len();
4209 let end = start + chunk.text.len();
4210 highlight_ranges.push((start..end, style));
4211 }
4212 text.push_str(chunk.text);
4213 if offset >= buffer_range.end {
4214 break;
4215 }
4216 }
4217 if is_name {
4218 let after_append_len = text.len();
4219 let start = if space_added && !name_ranges.is_empty() {
4220 before_append_len - 1
4221 } else {
4222 before_append_len
4223 };
4224 name_ranges.push(start..after_append_len);
4225 }
4226 last_buffer_range_end = buffer_range.end;
4227 }
4228
4229 Some(OutlineItem {
4230 depth: 0, // We'll calculate the depth later
4231 range: item_point_range,
4232 source_range_for_text: source_range_for_text.to_point(self),
4233 text,
4234 highlight_ranges,
4235 name_ranges,
4236 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4237 annotation_range: None,
4238 })
4239 }
4240
4241 pub fn function_body_fold_ranges<T: ToOffset>(
4242 &self,
4243 within: Range<T>,
4244 ) -> impl Iterator<Item = Range<usize>> + '_ {
4245 self.text_object_ranges(within, TreeSitterOptions::default())
4246 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4247 }
4248
4249 /// For each grammar in the language, runs the provided
4250 /// [`tree_sitter::Query`] against the given range.
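///
/// A minimal sketch mirroring how this method is used elsewhere in this file (the
/// `snapshot` binding is an illustrative assumption):
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     // Inspect `mat.captures` here.
///     matches.advance();
/// }
/// ```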
4251 pub fn matches(
4252 &self,
4253 range: Range<usize>,
4254 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4255 ) -> SyntaxMapMatches<'_> {
4256 self.syntax.matches(range, self, query)
4257 }
4258
4259 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4260 /// Hence, it may return more bracket pairs than the range contains.
4261 ///
4262 /// Chunks already present in `known_chunks` (and still up to date) are omitted.
4263 /// The resulting bracket match collections are not ordered.
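///
/// A minimal sketch (the `snapshot` binding and the whole-buffer range are illustrative
/// assumptions):
///
/// ```ignore
/// // Pass `None` to recompute brackets without reusing previously known chunks.
/// let brackets = snapshot.fetch_bracket_ranges(0..snapshot.len(), None);
/// for (row_range, matches) in &brackets {
///     println!("rows {:?}: {} bracket pairs", row_range, matches.len());
/// }
/// ```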
4264 pub fn fetch_bracket_ranges(
4265 &self,
4266 range: Range<usize>,
4267 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4268 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4269 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4270
4271 let known_chunks = match known_chunks {
4272 Some((known_version, known_chunks)) => {
4273 if !tree_sitter_data
4274 .chunks
4275 .version()
4276 .changed_since(known_version)
4277 {
4278 known_chunks.clone()
4279 } else {
4280 HashSet::default()
4281 }
4282 }
4283 None => HashSet::default(),
4284 };
4285
4286 let mut new_bracket_matches = HashMap::default();
4287 let mut all_bracket_matches = HashMap::default();
4288
4289 for chunk in tree_sitter_data
4290 .chunks
4291 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4292 {
4293 if known_chunks.contains(&chunk.row_range()) {
4294 continue;
4295 }
4296 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4297 continue;
4298 };
4299 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4300
4301 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4302 Some(cached_brackets) => cached_brackets,
4303 None => {
4304 let mut all_brackets = Vec::new();
4305 let mut opens = Vec::new();
4306 let mut color_pairs = Vec::new();
4307
4308 let mut matches =
4309 self.syntax
4310 .matches(chunk_range.clone(), &self.text, |grammar| {
4311 grammar.brackets_config.as_ref().map(|c| &c.query)
4312 });
4313 let configs = matches
4314 .grammars()
4315 .iter()
4316 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4317 .collect::<Vec<_>>();
4318
4319 while let Some(mat) = matches.peek() {
4320 let mut open = None;
4321 let mut close = None;
4322 let syntax_layer_depth = mat.depth;
4323 let config = configs[mat.grammar_index];
4324 let pattern = &config.patterns[mat.pattern_index];
4325 for capture in mat.captures {
4326 if capture.index == config.open_capture_ix {
4327 open = Some(capture.node.byte_range());
4328 } else if capture.index == config.close_capture_ix {
4329 close = Some(capture.node.byte_range());
4330 }
4331 }
4332
4333 matches.advance();
4334
4335 let Some((open_range, close_range)) = open.zip(close) else {
4336 continue;
4337 };
4338
4339 let bracket_range = open_range.start..=close_range.end;
4340 if !bracket_range.overlaps(&chunk_range) {
4341 continue;
4342 }
4343
4344 let index = all_brackets.len();
4345 all_brackets.push(BracketMatch {
4346 open_range: open_range.clone(),
4347 close_range: close_range.clone(),
4348 newline_only: pattern.newline_only,
4349 syntax_layer_depth,
4350 color_index: None,
4351 });
4352
4353 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4354 // a bracket will match the entire tag with all of the text inside.
4355 // For now, avoid highlighting any pair that has more than a single char in each bracket.
4356 // We need to colorize `<Element/>` bracket pairs, so cannot make this check stricter.
4357 let should_color = !pattern.rainbow_exclude
4358 && (open_range.len() == 1 || close_range.len() == 1);
4359 if should_color {
4360 opens.push(open_range.clone());
4361 color_pairs.push((open_range, close_range, index));
4362 }
4363 }
4364
4365 opens.sort_by_key(|r| (r.start, r.end));
4366 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4367 color_pairs.sort_by_key(|(_, close, _)| close.end);
4368
4369 let mut open_stack = Vec::new();
4370 let mut open_index = 0;
4371 for (open, close, index) in color_pairs {
4372 while open_index < opens.len() && opens[open_index].start < close.start {
4373 open_stack.push(opens[open_index].clone());
4374 open_index += 1;
4375 }
4376
4377 if open_stack.last() == Some(&open) {
4378 let depth_index = open_stack.len() - 1;
4379 all_brackets[index].color_index = Some(depth_index);
4380 open_stack.pop();
4381 }
4382 }
4383
4384 all_brackets.sort_by_key(|bracket_match| {
4385 (bracket_match.open_range.start, bracket_match.open_range.end)
4386 });
4387 new_bracket_matches.insert(chunk.id, all_brackets.clone());
4388 all_brackets
4389 }
4390 };
4391 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4392 }
4393
4394 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4395 if latest_tree_sitter_data.chunks.version() == &self.version {
4396 for (chunk_id, new_matches) in new_bracket_matches {
4397 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4398 if old_chunks.is_none() {
4399 *old_chunks = Some(new_matches);
4400 }
4401 }
4402 }
4403
4404 all_bracket_matches
4405 }
4406
4407 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4408 let mut tree_sitter_data = self.tree_sitter_data.lock();
4409 if self
4410 .version
4411 .changed_since(tree_sitter_data.chunks.version())
4412 {
4413 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4414 }
4415 tree_sitter_data
4416 }
4417
4418 pub fn all_bracket_ranges(
4419 &self,
4420 range: Range<usize>,
4421 ) -> impl Iterator<Item = BracketMatch<usize>> {
4422 self.fetch_bracket_ranges(range.clone(), None)
4423 .into_values()
4424 .flatten()
4425 .filter(move |bracket_match| {
4426 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4427 bracket_range.overlaps(&range)
4428 })
4429 }
4430
4431 /// Returns bracket range pairs overlapping or adjacent to `range`
4432 pub fn bracket_ranges<T: ToOffset>(
4433 &self,
4434 range: Range<T>,
4435 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4436 // Find bracket pairs that *inclusively* contain the given range.
4437 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4438 self.all_bracket_ranges(range)
4439 .filter(|pair| !pair.newline_only)
4440 }
4441
4442 pub fn debug_variables_query<T: ToOffset>(
4443 &self,
4444 range: Range<T>,
4445 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4446 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4447
4448 let mut matches = self.syntax.matches_with_options(
4449 range.clone(),
4450 &self.text,
4451 TreeSitterOptions::default(),
4452 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4453 );
4454
4455 let configs = matches
4456 .grammars()
4457 .iter()
4458 .map(|grammar| grammar.debug_variables_config.as_ref())
4459 .collect::<Vec<_>>();
4460
4461 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4462
4463 iter::from_fn(move || {
4464 loop {
4465 while let Some(capture) = captures.pop() {
4466 if capture.0.overlaps(&range) {
4467 return Some(capture);
4468 }
4469 }
4470
4471 let mat = matches.peek()?;
4472
4473 let Some(config) = configs[mat.grammar_index].as_ref() else {
4474 matches.advance();
4475 continue;
4476 };
4477
4478 for capture in mat.captures {
4479 let Some(ix) = config
4480 .objects_by_capture_ix
4481 .binary_search_by_key(&capture.index, |e| e.0)
4482 .ok()
4483 else {
4484 continue;
4485 };
4486 let text_object = config.objects_by_capture_ix[ix].1;
4487 let byte_range = capture.node.byte_range();
4488
4489 let mut found = false;
4490 for (range, existing) in captures.iter_mut() {
4491 if existing == &text_object {
4492 range.start = range.start.min(byte_range.start);
4493 range.end = range.end.max(byte_range.end);
4494 found = true;
4495 break;
4496 }
4497 }
4498
4499 if !found {
4500 captures.push((byte_range, text_object));
4501 }
4502 }
4503
4504 matches.advance();
4505 }
4506 })
4507 }
4508
4509 pub fn text_object_ranges<T: ToOffset>(
4510 &self,
4511 range: Range<T>,
4512 options: TreeSitterOptions,
4513 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4514 let range =
4515 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4516
4517 let mut matches =
4518 self.syntax
4519 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4520 grammar.text_object_config.as_ref().map(|c| &c.query)
4521 });
4522
4523 let configs = matches
4524 .grammars()
4525 .iter()
4526 .map(|grammar| grammar.text_object_config.as_ref())
4527 .collect::<Vec<_>>();
4528
4529 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4530
4531 iter::from_fn(move || {
4532 loop {
4533 while let Some(capture) = captures.pop() {
4534 if capture.0.overlaps(&range) {
4535 return Some(capture);
4536 }
4537 }
4538
4539 let mat = matches.peek()?;
4540
4541 let Some(config) = configs[mat.grammar_index].as_ref() else {
4542 matches.advance();
4543 continue;
4544 };
4545
4546 for capture in mat.captures {
4547 let Some(ix) = config
4548 .text_objects_by_capture_ix
4549 .binary_search_by_key(&capture.index, |e| e.0)
4550 .ok()
4551 else {
4552 continue;
4553 };
4554 let text_object = config.text_objects_by_capture_ix[ix].1;
4555 let byte_range = capture.node.byte_range();
4556
4557 let mut found = false;
4558 for (range, existing) in captures.iter_mut() {
4559 if existing == &text_object {
4560 range.start = range.start.min(byte_range.start);
4561 range.end = range.end.max(byte_range.end);
4562 found = true;
4563 break;
4564 }
4565 }
4566
4567 if !found {
4568 captures.push((byte_range, text_object));
4569 }
4570 }
4571
4572 matches.advance();
4573 }
4574 })
4575 }
4576
4577 /// Returns enclosing bracket ranges containing the given range
4578 pub fn enclosing_bracket_ranges<T: ToOffset>(
4579 &self,
4580 range: Range<T>,
4581 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4582 let range = range.start.to_offset(self)..range.end.to_offset(self);
4583
4584 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4585 let max_depth = result
4586 .iter()
4587 .map(|mat| mat.syntax_layer_depth)
4588 .max()
4589 .unwrap_or(0);
4590 result.into_iter().filter(move |pair| {
4591 pair.open_range.start <= range.start
4592 && pair.close_range.end >= range.end
4593 && pair.syntax_layer_depth == max_depth
4594 })
4595 }
4596
4597 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4598 ///
4599 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
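///
/// A minimal sketch (the `snapshot` binding and the offsets are illustrative assumptions):
///
/// ```ignore
/// // Only consider bracket pairs whose open bracket is a single byte wide.
/// let filter: &dyn Fn(Range<usize>, Range<usize>) -> bool = &|open, _close| open.len() == 1;
/// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(10..12, Some(filter)) {
///     println!("enclosed by {:?} and {:?}", open, close);
/// }
/// ```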
4600 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4601 &self,
4602 range: Range<T>,
4603 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4604 ) -> Option<(Range<usize>, Range<usize>)> {
4605 let range = range.start.to_offset(self)..range.end.to_offset(self);
4606
4607 // Get the ranges of the innermost pair of brackets.
4608 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4609
4610 for pair in self.enclosing_bracket_ranges(range) {
4611 if let Some(range_filter) = range_filter
4612 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4613 {
4614 continue;
4615 }
4616
4617 let len = pair.close_range.end - pair.open_range.start;
4618
4619 if let Some((existing_open, existing_close)) = &result {
4620 let existing_len = existing_close.end - existing_open.start;
4621 if len > existing_len {
4622 continue;
4623 }
4624 }
4625
4626 result = Some((pair.open_range, pair.close_range));
4627 }
4628
4629 result
4630 }
4631
4632 /// Returns anchor ranges for any matches of the redaction query.
4633 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4634 /// will be run on the relevant section of the buffer.
4635 pub fn redacted_ranges<T: ToOffset>(
4636 &self,
4637 range: Range<T>,
4638 ) -> impl Iterator<Item = Range<usize>> + '_ {
4639 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4640 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4641 grammar
4642 .redactions_config
4643 .as_ref()
4644 .map(|config| &config.query)
4645 });
4646
4647 let configs = syntax_matches
4648 .grammars()
4649 .iter()
4650 .map(|grammar| grammar.redactions_config.as_ref())
4651 .collect::<Vec<_>>();
4652
4653 iter::from_fn(move || {
4654 let redacted_range = syntax_matches
4655 .peek()
4656 .and_then(|mat| {
4657 configs[mat.grammar_index].and_then(|config| {
4658 mat.captures
4659 .iter()
4660 .find(|capture| capture.index == config.redaction_capture_ix)
4661 })
4662 })
4663 .map(|mat| mat.node.byte_range());
4664 syntax_matches.advance();
4665 redacted_range
4666 })
4667 }
4668
4669 pub fn injections_intersecting_range<T: ToOffset>(
4670 &self,
4671 range: Range<T>,
4672 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4673 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4674
4675 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4676 grammar
4677 .injection_config
4678 .as_ref()
4679 .map(|config| &config.query)
4680 });
4681
4682 let configs = syntax_matches
4683 .grammars()
4684 .iter()
4685 .map(|grammar| grammar.injection_config.as_ref())
4686 .collect::<Vec<_>>();
4687
4688 iter::from_fn(move || {
4689 let ranges = syntax_matches.peek().and_then(|mat| {
4690 let config = &configs[mat.grammar_index]?;
4691 let content_capture_range = mat.captures.iter().find_map(|capture| {
4692 if capture.index == config.content_capture_ix {
4693 Some(capture.node.byte_range())
4694 } else {
4695 None
4696 }
4697 })?;
4698 let language = self.language_at(content_capture_range.start)?;
4699 Some((content_capture_range, language))
4700 });
4701 syntax_matches.advance();
4702 ranges
4703 })
4704 }
4705
4706 pub fn runnable_ranges(
4707 &self,
4708 offset_range: Range<usize>,
4709 ) -> impl Iterator<Item = RunnableRange> + '_ {
4710 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4711 grammar.runnable_config.as_ref().map(|config| &config.query)
4712 });
4713
4714 let test_configs = syntax_matches
4715 .grammars()
4716 .iter()
4717 .map(|grammar| grammar.runnable_config.as_ref())
4718 .collect::<Vec<_>>();
4719
4720 iter::from_fn(move || {
4721 loop {
4722 let mat = syntax_matches.peek()?;
4723
4724 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4725 let mut run_range = None;
4726 let full_range = mat.captures.iter().fold(
4727 Range {
4728 start: usize::MAX,
4729 end: 0,
4730 },
4731 |mut acc, next| {
4732 let byte_range = next.node.byte_range();
4733 if acc.start > byte_range.start {
4734 acc.start = byte_range.start;
4735 }
4736 if acc.end < byte_range.end {
4737 acc.end = byte_range.end;
4738 }
4739 acc
4740 },
4741 );
4742 if full_range.start > full_range.end {
4743 // We did not find a full spanning range of this match.
4744 return None;
4745 }
4746 let extra_captures: SmallVec<[_; 1]> =
4747 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4748 test_configs
4749 .extra_captures
4750 .get(capture.index as usize)
4751 .cloned()
4752 .and_then(|tag_name| match tag_name {
4753 RunnableCapture::Named(name) => {
4754 Some((capture.node.byte_range(), name))
4755 }
4756 RunnableCapture::Run => {
4757 let _ = run_range.insert(capture.node.byte_range());
4758 None
4759 }
4760 })
4761 }));
4762 let run_range = run_range?;
4763 let tags = test_configs
4764 .query
4765 .property_settings(mat.pattern_index)
4766 .iter()
4767 .filter_map(|property| {
4768 if *property.key == *"tag" {
4769 property
4770 .value
4771 .as_ref()
4772 .map(|value| RunnableTag(value.to_string().into()))
4773 } else {
4774 None
4775 }
4776 })
4777 .collect();
4778 let extra_captures = extra_captures
4779 .into_iter()
4780 .map(|(range, name)| {
4781 (
4782 name.to_string(),
4783 self.text_for_range(range).collect::<String>(),
4784 )
4785 })
4786 .collect();
4787 // All tags should have the same range.
4788 Some(RunnableRange {
4789 run_range,
4790 full_range,
4791 runnable: Runnable {
4792 tags,
4793 language: mat.language,
4794 buffer: self.remote_id(),
4795 },
4796 extra_captures,
4797 buffer_id: self.remote_id(),
4798 })
4799 });
4800
4801 syntax_matches.advance();
4802 if test_range.is_some() {
4803 // It's fine for us to short-circuit when .peek()? returns None. But we don't want to return None from this iterator
4804 // just because a match did not contain a run marker, so in that case we loop around and try the next match.
4805 return test_range;
4806 }
4807 }
4808 })
4809 }
4810
4811 /// Returns selections for remote peers intersecting the given range.
4812 #[allow(clippy::type_complexity)]
4813 pub fn selections_in_range(
4814 &self,
4815 range: Range<Anchor>,
4816 include_local: bool,
4817 ) -> impl Iterator<
4818 Item = (
4819 ReplicaId,
4820 bool,
4821 CursorShape,
4822 impl Iterator<Item = &Selection<Anchor>> + '_,
4823 ),
4824 > + '_ {
4825 self.remote_selections
4826 .iter()
4827 .filter(move |(replica_id, set)| {
4828 (include_local || **replica_id != self.text.replica_id())
4829 && !set.selections.is_empty()
4830 })
4831 .map(move |(replica_id, set)| {
4832 let start_ix = match set.selections.binary_search_by(|probe| {
4833 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4834 }) {
4835 Ok(ix) | Err(ix) => ix,
4836 };
4837 let end_ix = match set.selections.binary_search_by(|probe| {
4838 probe.start.cmp(&range.end, self).then(Ordering::Less)
4839 }) {
4840 Ok(ix) | Err(ix) => ix,
4841 };
4842
4843 (
4844 *replica_id,
4845 set.line_mode,
4846 set.cursor_shape,
4847 set.selections[start_ix..end_ix].iter(),
4848 )
4849 })
4850 }
4851
4852 /// Returns whether the buffer contains any diagnostics.
4853 pub fn has_diagnostics(&self) -> bool {
4854 !self.diagnostics.is_empty()
4855 }
4856
4857 /// Returns all the diagnostics intersecting the given range.
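///
/// A minimal sketch (the `snapshot` binding is an illustrative assumption):
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
/// }
/// ```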
4858 pub fn diagnostics_in_range<'a, T, O>(
4859 &'a self,
4860 search_range: Range<T>,
4861 reversed: bool,
4862 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4863 where
4864 T: 'a + Clone + ToOffset,
4865 O: 'a + FromAnchor,
4866 {
4867 let mut iterators: Vec<_> = self
4868 .diagnostics
4869 .iter()
4870 .map(|(_, collection)| {
4871 collection
4872 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4873 .peekable()
4874 })
4875 .collect();
4876
4877 std::iter::from_fn(move || {
4878 let (next_ix, _) = iterators
4879 .iter_mut()
4880 .enumerate()
4881 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4882 .min_by(|(_, a), (_, b)| {
4883 let cmp = a
4884 .range
4885 .start
4886 .cmp(&b.range.start, self)
4887 // when range is equal, sort by diagnostic severity
4888 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4889 // and stabilize order with group_id
4890 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4891 if reversed { cmp.reverse() } else { cmp }
4892 })?;
4893 iterators[next_ix]
4894 .next()
4895 .map(
4896 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4897 diagnostic,
4898 range: FromAnchor::from_anchor(&range.start, self)
4899 ..FromAnchor::from_anchor(&range.end, self),
4900 },
4901 )
4902 })
4903 }
4904
4905 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4906 /// should be used instead.
4907 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4908 &self.diagnostics
4909 }
4910
4911 /// Returns all the diagnostic groups associated with the given
4912 /// language server ID. If no language server ID is provided,
4913 /// all diagnostic groups are returned.
4914 pub fn diagnostic_groups(
4915 &self,
4916 language_server_id: Option<LanguageServerId>,
4917 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4918 let mut groups = Vec::new();
4919
4920 if let Some(language_server_id) = language_server_id {
4921 if let Ok(ix) = self
4922 .diagnostics
4923 .binary_search_by_key(&language_server_id, |e| e.0)
4924 {
4925 self.diagnostics[ix]
4926 .1
4927 .groups(language_server_id, &mut groups, self);
4928 }
4929 } else {
4930 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4931 diagnostics.groups(*language_server_id, &mut groups, self);
4932 }
4933 }
4934
4935 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4936 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4937 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4938 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4939 });
4940
4941 groups
4942 }
4943
4944 /// Returns an iterator over the diagnostics for the given group.
4945 pub fn diagnostic_group<O>(
4946 &self,
4947 group_id: usize,
4948 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4949 where
4950 O: FromAnchor + 'static,
4951 {
4952 self.diagnostics
4953 .iter()
4954 .flat_map(move |(_, set)| set.group(group_id, self))
4955 }
4956
4957 /// An integer version number that accounts for all updates besides
4958 /// the buffer's text itself (which is versioned via a version vector).
4959 pub fn non_text_state_update_count(&self) -> usize {
4960 self.non_text_state_update_count
4961 }
4962
4963 /// An integer version that changes when the buffer's syntax changes.
4964 pub fn syntax_update_count(&self) -> usize {
4965 self.syntax.update_count()
4966 }
4967
4968 /// Returns a snapshot of the underlying file.
4969 pub fn file(&self) -> Option<&Arc<dyn File>> {
4970 self.file.as_ref()
4971 }
4972
4973 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4974 if let Some(file) = self.file() {
4975 if file.path().file_name().is_none() || include_root {
4976 Some(file.full_path(cx).to_string_lossy().into_owned())
4977 } else {
4978 Some(file.path().display(file.path_style(cx)).to_string())
4979 }
4980 } else {
4981 None
4982 }
4983 }
4984
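/// Collects the words in the given range that match the [`WordsQuery`], keyed by their text.
///
/// A minimal sketch (the `snapshot` binding is an illustrative assumption):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("foo"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for (word, _anchor_range) in &words {
///     println!("{word}");
/// }
/// ```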
4985 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4986 let query_str = query.fuzzy_contents;
4987 if query_str.is_some_and(|query| query.is_empty()) {
4988 return BTreeMap::default();
4989 }
4990
4991 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4992 language,
4993 override_id: None,
4994 }));
4995
4996 let mut query_ix = 0;
4997 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4998 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4999
5000 let mut words = BTreeMap::default();
5001 let mut current_word_start_ix = None;
5002 let mut chunk_ix = query.range.start;
5003 for chunk in self.chunks(query.range, false) {
5004 for (i, c) in chunk.text.char_indices() {
5005 let ix = chunk_ix + i;
5006 if classifier.is_word(c) {
5007 if current_word_start_ix.is_none() {
5008 current_word_start_ix = Some(ix);
5009 }
5010
5011 if let Some(query_chars) = &query_chars
5012 && query_ix < query_len
5013 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5014 {
5015 query_ix += 1;
5016 }
5017 continue;
5018 } else if let Some(word_start) = current_word_start_ix.take()
5019 && query_ix == query_len
5020 {
5021 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5022 let mut word_text = self.text_for_range(word_start..ix).peekable();
5023 let first_char = word_text
5024 .peek()
5025 .and_then(|first_chunk| first_chunk.chars().next());
5026 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
5027 if !query.skip_digits
5028 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5029 {
5030 words.insert(word_text.collect(), word_range);
5031 }
5032 }
5033 query_ix = 0;
5034 }
5035 chunk_ix += chunk.text.len();
5036 }
5037
5038 words
5039 }
5040}
5041
5042pub struct WordsQuery<'a> {
5043 /// Only returns words that contain every character of this string, matched in order (case-insensitively).
5044 pub fuzzy_contents: Option<&'a str>,
5045 /// Skips words that start with a digit.
5046 pub skip_digits: bool,
5047 /// The buffer offset range to search for words.
5048 pub range: Range<usize>,
5049}
5050
5051fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5052 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5053}
5054
5055fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5056 let mut result = IndentSize::spaces(0);
5057 for c in text {
5058 let kind = match c {
5059 ' ' => IndentKind::Space,
5060 '\t' => IndentKind::Tab,
5061 _ => break,
5062 };
5063 if result.len == 0 {
5064 result.kind = kind;
5065 }
5066 result.len += 1;
5067 }
5068 result
5069}
5070
5071impl Clone for BufferSnapshot {
5072 fn clone(&self) -> Self {
5073 Self {
5074 text: self.text.clone(),
5075 syntax: self.syntax.clone(),
5076 file: self.file.clone(),
5077 remote_selections: self.remote_selections.clone(),
5078 diagnostics: self.diagnostics.clone(),
5079 language: self.language.clone(),
5080 tree_sitter_data: self.tree_sitter_data.clone(),
5081 non_text_state_update_count: self.non_text_state_update_count,
5082 }
5083 }
5084}
5085
5086impl Deref for BufferSnapshot {
5087 type Target = text::BufferSnapshot;
5088
5089 fn deref(&self) -> &Self::Target {
5090 &self.text
5091 }
5092}
5093
5094unsafe impl Send for BufferChunks<'_> {}
5095
5096impl<'a> BufferChunks<'a> {
5097 pub(crate) fn new(
5098 text: &'a Rope,
5099 range: Range<usize>,
5100 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5101 diagnostics: bool,
5102 buffer_snapshot: Option<&'a BufferSnapshot>,
5103 ) -> Self {
5104 let mut highlights = None;
5105 if let Some((captures, highlight_maps)) = syntax {
5106 highlights = Some(BufferChunkHighlights {
5107 captures,
5108 next_capture: None,
5109 stack: Default::default(),
5110 highlight_maps,
5111 })
5112 }
5113
5114 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5115 let chunks = text.chunks_in_range(range.clone());
5116
5117 let mut this = BufferChunks {
5118 range,
5119 buffer_snapshot,
5120 chunks,
5121 diagnostic_endpoints,
5122 error_depth: 0,
5123 warning_depth: 0,
5124 information_depth: 0,
5125 hint_depth: 0,
5126 unnecessary_depth: 0,
5127 underline: true,
5128 highlights,
5129 };
5130 this.initialize_diagnostic_endpoints();
5131 this
5132 }
5133
5134 /// Seeks to the given byte range in the buffer.
5135 pub fn seek(&mut self, range: Range<usize>) {
5136 let old_range = std::mem::replace(&mut self.range, range.clone());
5137 self.chunks.set_range(self.range.clone());
5138 if let Some(highlights) = self.highlights.as_mut() {
5139 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5140 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5141 highlights
5142 .stack
5143 .retain(|(end_offset, _)| *end_offset > range.start);
5144 if let Some(capture) = &highlights.next_capture
5145 && range.start >= capture.node.start_byte()
5146 {
5147 let next_capture_end = capture.node.end_byte();
5148 if range.start < next_capture_end {
5149 highlights.stack.push((
5150 next_capture_end,
5151 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5152 ));
5153 }
5154 highlights.next_capture.take();
5155 }
5156 } else if let Some(snapshot) = self.buffer_snapshot {
5157 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5158 *highlights = BufferChunkHighlights {
5159 captures,
5160 next_capture: None,
5161 stack: Default::default(),
5162 highlight_maps,
5163 };
5164 } else {
5165 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5166 // Seeking such BufferChunks is not supported.
5167 debug_assert!(
5168 false,
5169 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5170 );
5171 }
5172
5173 highlights.captures.set_byte_range(self.range.clone());
5174 self.initialize_diagnostic_endpoints();
5175 }
5176 }
5177
5178 fn initialize_diagnostic_endpoints(&mut self) {
5179 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5180 && let Some(buffer) = self.buffer_snapshot
5181 {
5182 let mut diagnostic_endpoints = Vec::new();
5183 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5184 diagnostic_endpoints.push(DiagnosticEndpoint {
5185 offset: entry.range.start,
5186 is_start: true,
5187 severity: entry.diagnostic.severity,
5188 is_unnecessary: entry.diagnostic.is_unnecessary,
5189 underline: entry.diagnostic.underline,
5190 });
5191 diagnostic_endpoints.push(DiagnosticEndpoint {
5192 offset: entry.range.end,
5193 is_start: false,
5194 severity: entry.diagnostic.severity,
5195 is_unnecessary: entry.diagnostic.is_unnecessary,
5196 underline: entry.diagnostic.underline,
5197 });
5198 }
5199 diagnostic_endpoints
5200 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5201 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5202 self.hint_depth = 0;
5203 self.error_depth = 0;
5204 self.warning_depth = 0;
5205 self.information_depth = 0;
5206 }
5207 }
5208
5209 /// The current byte offset in the buffer.
5210 pub fn offset(&self) -> usize {
5211 self.range.start
5212 }
5213
5214 pub fn range(&self) -> Range<usize> {
5215 self.range.clone()
5216 }
5217
5218 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5219 let depth = match endpoint.severity {
5220 DiagnosticSeverity::ERROR => &mut self.error_depth,
5221 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5222 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5223 DiagnosticSeverity::HINT => &mut self.hint_depth,
5224 _ => return,
5225 };
5226 if endpoint.is_start {
5227 *depth += 1;
5228 } else {
5229 *depth -= 1;
5230 }
5231
5232 if endpoint.is_unnecessary {
5233 if endpoint.is_start {
5234 self.unnecessary_depth += 1;
5235 } else {
5236 self.unnecessary_depth -= 1;
5237 }
5238 }
5239 }
5240
5241 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5242 if self.error_depth > 0 {
5243 Some(DiagnosticSeverity::ERROR)
5244 } else if self.warning_depth > 0 {
5245 Some(DiagnosticSeverity::WARNING)
5246 } else if self.information_depth > 0 {
5247 Some(DiagnosticSeverity::INFORMATION)
5248 } else if self.hint_depth > 0 {
5249 Some(DiagnosticSeverity::HINT)
5250 } else {
5251 None
5252 }
5253 }
5254
5255 fn current_code_is_unnecessary(&self) -> bool {
5256 self.unnecessary_depth > 0
5257 }
5258}
5259
5260impl<'a> Iterator for BufferChunks<'a> {
5261 type Item = Chunk<'a>;
5262
5263 fn next(&mut self) -> Option<Self::Item> {
5264 let mut next_capture_start = usize::MAX;
5265 let mut next_diagnostic_endpoint = usize::MAX;
5266
5267 if let Some(highlights) = self.highlights.as_mut() {
5268 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5269 if *parent_capture_end <= self.range.start {
5270 highlights.stack.pop();
5271 } else {
5272 break;
5273 }
5274 }
5275
5276 if highlights.next_capture.is_none() {
5277 highlights.next_capture = highlights.captures.next();
5278 }
5279
5280 while let Some(capture) = highlights.next_capture.as_ref() {
5281 if self.range.start < capture.node.start_byte() {
5282 next_capture_start = capture.node.start_byte();
5283 break;
5284 } else {
5285 let highlight_id =
5286 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5287 highlights
5288 .stack
5289 .push((capture.node.end_byte(), highlight_id));
5290 highlights.next_capture = highlights.captures.next();
5291 }
5292 }
5293 }
5294
5295 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5296 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5297 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5298 if endpoint.offset <= self.range.start {
5299 self.update_diagnostic_depths(endpoint);
5300 diagnostic_endpoints.next();
5301 self.underline = endpoint.underline;
5302 } else {
5303 next_diagnostic_endpoint = endpoint.offset;
5304 break;
5305 }
5306 }
5307 }
5308 self.diagnostic_endpoints = diagnostic_endpoints;
5309
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

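            // Shift the chunk's tab/char bitmaps so that bit 0 lines up with the
            // start of the returned slice, then mask off bits at or above `bit_end`.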
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a single tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
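    ///
    /// For example, `IndentSize::spaces(2).chars()` yields two `' '` characters and
    /// `IndentSize::tab().chars()` yields a single `'\t'`.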
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes the current [`IndentSize`] and returns a new one that has been
    /// grown or shrunk by the given size, according to the given direction.
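    ///
    /// For example, `IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(2))`
    /// yields six spaces; shrinking by more than the current length, or mixing kinds
    /// (spaces vs. tabs) on a non-empty indent, leaves it unchanged.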
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

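    /// The width of this indent in columns, treating each tab as `tab_size` columns.
    /// For example, two tabs with a tab size of 4 occupy 8 columns.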
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

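/// Groups an ascending sequence of row numbers into contiguous ranges, capping each
/// range at `max_len` rows.
///
/// For example, the rows `[1, 2, 3, 7, 8]` with a `max_len` of 10 produce the ranges
/// `1..4` and `7..9`.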
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

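/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// consulting the language scope for additional word characters and optionally
/// folding punctuation into words.
///
/// For example, with a default classifier `'a'` and `'_'` are [`CharKind::Word`],
/// `' '` is [`CharKind::Whitespace`], and `'-'` is [`CharKind::Punctuation`] unless
/// `ignore_punctuation` is set.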
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

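    /// Classifies `c` with an explicit `ignore_punctuation` override. Checks are made
    /// in order: built-in word characters (alphanumerics and `_`), any extra word
    /// characters supplied by the language scope for the current context, whitespace,
    /// and finally punctuation (folded into [`CharKind::Word`] when
    /// `ignore_punctuation` is true).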
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
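///
/// For example, given the text `"let x = 1;  \n}\t"`, the returned ranges cover the
/// two spaces before the newline and the trailing tab at the end.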
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}