1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
80/// Indicate whether a [`Buffer`] has permissions to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
125 /// Memoizes calls to `has_changes_since(saved_version)`.
126 /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130 /// The result ID received the last time diagnostics were pulled for this buffer.
131 pull_diagnostics_result_id: Option<String>,
132}
133
134#[derive(Copy, Clone, Debug, PartialEq, Eq)]
135pub enum ParseStatus {
136 Idle,
137 Parsing,
138}
139
140struct BufferBranchState {
141 base_buffer: Entity<Buffer>,
142 merged_operations: Vec<Lamport>,
143}
144
145/// An immutable, cheaply cloneable representation of a fixed
146/// state of a buffer.
147pub struct BufferSnapshot {
148 pub text: text::BufferSnapshot,
149 pub(crate) syntax: SyntaxSnapshot,
150 file: Option<Arc<dyn File>>,
151 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
152 remote_selections: TreeMap<ReplicaId, SelectionSet>,
153 language: Option<Arc<Language>>,
154 non_text_state_update_count: usize,
155}
156
157/// The kind and amount of indentation in a particular line. For now,
158/// assumes that indentation is all the same character.
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
160pub struct IndentSize {
161 /// The number of bytes that comprise the indentation.
162 pub len: u32,
163 /// The kind of whitespace used for indentation.
164 pub kind: IndentKind,
165}
166
167/// A whitespace character that's used for indentation.
168#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
169pub enum IndentKind {
170 /// An ASCII space character.
171 #[default]
172 Space,
173 /// An ASCII tab character.
174 Tab,
175}
176
177/// The shape of a selection cursor.
178#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underline,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<NumberOrString>,
/// An optional URL with more information about this diagnostic's code.
207 pub code_description: Option<lsp::Url>,
208 /// Whether this diagnostic is a hint, warning, or error.
209 pub severity: DiagnosticSeverity,
210 /// The human-readable message associated with this diagnostic.
211 pub message: String,
212 /// The human-readable message in Markdown format, if available.
213 pub markdown: Option<String>,
214 /// An id that identifies the group to which this diagnostic belongs.
215 ///
216 /// When a language server produces a diagnostic with
217 /// one or more associated diagnostics, those diagnostics are all
218 /// assigned a single group ID.
219 pub group_id: usize,
220 /// Whether this diagnostic is the primary diagnostic for its group.
221 ///
222 /// In a given group, the primary diagnostic is the top-level diagnostic
223 /// returned by the language server. The non-primary diagnostics are the
224 /// associated diagnostics.
225 pub is_primary: bool,
226 /// Whether this diagnostic is considered to originate from an analysis of
227 /// files on disk, as opposed to any unsaved buffer contents. This is a
228 /// property of a given diagnostic source, and is configured for a given
229 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
230 /// for the language server.
231 pub is_disk_based: bool,
232 /// Whether this diagnostic marks unnecessary code.
233 pub is_unnecessary: bool,
234 /// A coarse classification of the diagnostic by the kind of source it came from.
235 pub source_kind: DiagnosticSourceKind,
236 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
237 pub data: Option<Value>,
238 /// Whether to underline the corresponding text range in the editor.
239 pub underline: bool,
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
243pub enum DiagnosticSourceKind {
244 Pulled,
245 Pushed,
246 Other,
247}
248
249/// An operation used to synchronize this buffer with its other replicas.
250#[derive(Clone, Debug, PartialEq)]
251pub enum Operation {
252 /// A text operation.
253 Buffer(text::Operation),
254
255 /// An update to the buffer's diagnostics.
256 UpdateDiagnostics {
257 /// The id of the language server that produced the new diagnostics.
258 server_id: LanguageServerId,
259 /// The diagnostics.
260 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
261 /// The buffer's lamport timestamp.
262 lamport_timestamp: clock::Lamport,
263 },
264
265 /// An update to the most recent selections in this buffer.
266 UpdateSelections {
267 /// The selections.
268 selections: Arc<[Selection<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// Whether the selections are in 'line mode'.
272 line_mode: bool,
273 /// The [`CursorShape`] associated with these selections.
274 cursor_shape: CursorShape,
275 },
276
277 /// An update to the characters that should trigger autocompletion
278 /// for this buffer.
279 UpdateCompletionTriggers {
280 /// The characters that trigger autocompletion.
281 triggers: Vec<String>,
282 /// The buffer's lamport timestamp.
283 lamport_timestamp: clock::Lamport,
284 /// The language server ID.
285 server_id: LanguageServerId,
286 },
287}
288
289/// An event that occurs in a buffer.
290#[derive(Clone, Debug, PartialEq)]
291pub enum BufferEvent {
292 /// The buffer was changed in a way that must be
293 /// propagated to its other replicas.
294 Operation {
295 operation: Operation,
296 is_local: bool,
297 },
298 /// The buffer was edited.
299 Edited,
300 /// The buffer's `dirty` bit changed.
301 DirtyChanged,
302 /// The buffer was saved.
303 Saved,
304 /// The buffer's file was changed on disk.
305 FileHandleChanged,
306 /// The buffer was reloaded.
307 Reloaded,
308 /// The buffer needs to be reloaded.
309 ReloadNeeded,
310 /// The buffer's language was changed.
311 LanguageChanged,
312 /// The buffer's syntax trees were updated.
313 Reparsed,
314 /// The buffer's diagnostics were updated.
315 DiagnosticsUpdated,
316 /// The buffer gained or lost editing capabilities.
317 CapabilityChanged,
318 /// The buffer was explicitly requested to close.
319 Closed,
320 /// The buffer was discarded when closing.
321 Discarded,
322}
323
324/// The file associated with a buffer.
325pub trait File: Send + Sync + Any {
326 /// Returns the [`LocalFile`] associated with this file, if the
327 /// file is local.
328 fn as_local(&self) -> Option<&dyn LocalFile>;
329
330 /// Returns whether this file is local.
331 fn is_local(&self) -> bool {
332 self.as_local().is_some()
333 }
334
335 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
336 /// only available in some states, such as modification time.
337 fn disk_state(&self) -> DiskState;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &App) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self, cx: &App) -> WorktreeId;
354
355 /// Converts this file into a protobuf message.
356 fn to_proto(&self, cx: &App) -> rpc::proto::File;
357
358 /// Returns whether Zed considers this to be a private file.
359 fn is_private(&self) -> bool;
360}
361
362/// The file's storage status - whether it's stored (`Present`), and if so when it was last
363/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
364/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
365/// indicator for new files.
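///
/// A minimal illustrative match over the three states (the handling shown is
/// hypothetical, not part of this crate):
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => { /* never saved to disk */ }
///     DiskState::Present { mtime: _ } => { /* stored on disk */ }
///     DiskState::Deleted => { /* previously present, now removed */ }
/// }
/// ```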
366#[derive(Copy, Clone, Debug, PartialEq)]
367pub enum DiskState {
368 /// File created in Zed that has not been saved.
369 New,
370 /// File present on the filesystem.
371 Present { mtime: MTime },
372 /// Deleted file that was previously present.
373 Deleted,
374}
375
376impl DiskState {
377 /// Returns the file's last known modification time on disk.
378 pub fn mtime(self) -> Option<MTime> {
379 match self {
380 DiskState::New => None,
381 DiskState::Present { mtime } => Some(mtime),
382 DiskState::Deleted => None,
383 }
384 }
385
386 pub fn exists(&self) -> bool {
387 match self {
388 DiskState::New => false,
389 DiskState::Present { .. } => true,
390 DiskState::Deleted => false,
391 }
392 }
393}
394
395/// The file associated with a buffer, in the case where the file is on the local disk.
396pub trait LocalFile: File {
397 /// Returns the absolute path of this file
398 fn abs_path(&self, cx: &App) -> PathBuf;
399
400 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
401 fn load(&self, cx: &App) -> Task<Result<String>>;
402
403 /// Loads the file's contents from disk.
404 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
405}
406
407/// The auto-indent behavior associated with an editing operation.
408/// For some editing operations, each affected line of text has its
409/// indentation recomputed. For other operations, the entire block
410/// of edited text is adjusted uniformly.
411#[derive(Clone, Debug)]
412pub enum AutoindentMode {
413 /// Indent each line of inserted text.
414 EachLine,
415 /// Apply the same indentation adjustment to all of the lines
416 /// in a given insertion.
417 Block {
418 /// The original indentation column of the first line of each
419 /// insertion, if it has been copied.
420 ///
421 /// Knowing this makes it possible to preserve the relative indentation
422 /// of every line in the insertion from when it was copied.
423 ///
424 /// If the original indent column is `a`, and the first line of the insertion
425 /// is then auto-indented to column `b`, then every other line of
426 /// the insertion will have its indentation adjusted by `b - a`.
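///
/// For example (illustrative numbers): if a block was copied from indent
/// column `a = 4` and its first line is auto-indented to column `b = 8`,
/// the indentation of every remaining line is shifted by `b - a = 4` columns.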
427 original_indent_columns: Vec<Option<u32>>,
428 },
429}
430
431#[derive(Clone)]
432struct AutoindentRequest {
433 before_edit: BufferSnapshot,
434 entries: Vec<AutoindentRequestEntry>,
435 is_block_mode: bool,
436 ignore_empty_lines: bool,
437}
438
439#[derive(Debug, Clone)]
440struct AutoindentRequestEntry {
441 /// A range of the buffer whose indentation should be adjusted.
442 range: Range<Anchor>,
443 /// Whether or not these lines should be considered brand new, for the
444 /// purpose of auto-indent. When text is not new, its indentation will
445 /// only be adjusted if the suggested indentation level has *changed*
446 /// since the edit was made.
447 first_line_is_new: bool,
448 indent_size: IndentSize,
449 original_indent_column: Option<u32>,
450}
451
452#[derive(Debug)]
453struct IndentSuggestion {
454 basis_row: u32,
455 delta: Ordering,
456 within_error: bool,
457}
458
459struct BufferChunkHighlights<'a> {
460 captures: SyntaxMapCaptures<'a>,
461 next_capture: Option<SyntaxMapCapture<'a>>,
462 stack: Vec<(usize, HighlightId)>,
463 highlight_maps: Vec<HighlightMap>,
464}
465
466/// An iterator that yields chunks of a buffer's text, along with their
467/// syntax highlights and diagnostic status.
468pub struct BufferChunks<'a> {
469 buffer_snapshot: Option<&'a BufferSnapshot>,
470 range: Range<usize>,
471 chunks: text::Chunks<'a>,
472 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
473 error_depth: usize,
474 warning_depth: usize,
475 information_depth: usize,
476 hint_depth: usize,
477 unnecessary_depth: usize,
478 underline: bool,
479 highlights: Option<BufferChunkHighlights<'a>>,
480}
481
482/// A chunk of a buffer's text, along with its syntax highlight and
483/// diagnostic status.
484#[derive(Clone, Debug, Default)]
485pub struct Chunk<'a> {
486 /// The text of the chunk.
487 pub text: &'a str,
488 /// The syntax highlighting style of the chunk.
489 pub syntax_highlight_id: Option<HighlightId>,
490 /// The highlight style that has been applied to this chunk in
491 /// the editor.
492 pub highlight_style: Option<HighlightStyle>,
493 /// The severity of the diagnostic associated with this chunk, if any.
494 pub diagnostic_severity: Option<DiagnosticSeverity>,
495 /// Whether this chunk of text is marked as unnecessary.
496 pub is_unnecessary: bool,
497 /// Whether this chunk of text was originally a tab character.
498 pub is_tab: bool,
499 /// Whether this chunk of text comes from an inlay rather than the buffer's text.
500 pub is_inlay: bool,
501 /// Whether to underline the corresponding text range in the editor.
502 pub underline: bool,
503}
504
505/// A set of edits to a given version of a buffer, computed asynchronously.
506#[derive(Debug)]
507pub struct Diff {
508 pub base_version: clock::Global,
509 pub line_ending: LineEnding,
510 pub edits: Vec<(Range<usize>, Arc<str>)>,
511}
512
513#[derive(Debug, Clone, Copy)]
514pub(crate) struct DiagnosticEndpoint {
515 offset: usize,
516 is_start: bool,
517 underline: bool,
518 severity: DiagnosticSeverity,
519 is_unnecessary: bool,
520}
521
522/// A class of characters, used for characterizing a run of text.
523#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
524pub enum CharKind {
525 /// Whitespace.
526 Whitespace,
527 /// Punctuation.
528 Punctuation,
529 /// Word.
530 Word,
531}
532
533 /// A runnable is a set of data about a region of the buffer that can be resolved into a task.
534pub struct Runnable {
535 pub tags: SmallVec<[RunnableTag; 1]>,
536 pub language: Arc<Language>,
537 pub buffer: BufferId,
538}
539
540#[derive(Default, Clone, Debug)]
541pub struct HighlightedText {
542 pub text: SharedString,
543 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
544}
545
546#[derive(Default, Debug)]
547struct HighlightedTextBuilder {
548 pub text: String,
549 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
550}
551
552impl HighlightedText {
553 pub fn from_buffer_range<T: ToOffset>(
554 range: Range<T>,
555 snapshot: &text::BufferSnapshot,
556 syntax_snapshot: &SyntaxSnapshot,
557 override_style: Option<HighlightStyle>,
558 syntax_theme: &SyntaxTheme,
559 ) -> Self {
560 let mut highlighted_text = HighlightedTextBuilder::default();
561 highlighted_text.add_text_from_buffer_range(
562 range,
563 snapshot,
564 syntax_snapshot,
565 override_style,
566 syntax_theme,
567 );
568 highlighted_text.build()
569 }
570
571 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
572 gpui::StyledText::new(self.text.clone())
573 .with_default_highlights(default_style, self.highlights.iter().cloned())
574 }
575
576 /// Returns the first line, with leading whitespace trimmed unless a highlight
577 /// starts within it, along with a boolean indicating whether more lines follow.
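///
/// A sketch of the intended behavior (hypothetical input, not from this
/// crate's tests):
/// ```ignore
/// // For the text "    let a = 1;\nlet b = 2;" with no highlight starting in
/// // the leading whitespace, the preview text is "let a = 1;" and the
/// // returned boolean is `true`, because a second line follows.
/// let (preview, has_more) = highlighted_text.first_line_preview();
/// ```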
578 pub fn first_line_preview(self) -> (Self, bool) {
579 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
580 let first_line = &self.text[..newline_ix];
581
582 // Trim leading whitespace, unless an edit starts prior to it.
583 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
584 if let Some((first_highlight_range, _)) = self.highlights.first() {
585 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
586 }
587
588 let preview_text = &first_line[preview_start_ix..];
589 let preview_highlights = self
590 .highlights
591 .into_iter()
592 .take_while(|(range, _)| range.start < newline_ix)
593 .filter_map(|(mut range, highlight)| {
594 range.start = range.start.saturating_sub(preview_start_ix);
595 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
596 if range.is_empty() {
597 None
598 } else {
599 Some((range, highlight))
600 }
601 });
602
603 let preview = Self {
604 text: SharedString::new(preview_text),
605 highlights: preview_highlights.collect(),
606 };
607
608 (preview, self.text.len() > newline_ix)
609 }
610}
611
612impl HighlightedTextBuilder {
613 pub fn build(self) -> HighlightedText {
614 HighlightedText {
615 text: self.text.into(),
616 highlights: self.highlights,
617 }
618 }
619
620 pub fn add_text_from_buffer_range<T: ToOffset>(
621 &mut self,
622 range: Range<T>,
623 snapshot: &text::BufferSnapshot,
624 syntax_snapshot: &SyntaxSnapshot,
625 override_style: Option<HighlightStyle>,
626 syntax_theme: &SyntaxTheme,
627 ) {
628 let range = range.to_offset(snapshot);
629 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
630 let start = self.text.len();
631 self.text.push_str(chunk.text);
632 let end = self.text.len();
633
634 if let Some(mut highlight_style) = chunk
635 .syntax_highlight_id
636 .and_then(|id| id.style(syntax_theme))
637 {
638 if let Some(override_style) = override_style {
639 highlight_style.highlight(override_style);
640 }
641 self.highlights.push((start..end, highlight_style));
642 } else if let Some(override_style) = override_style {
643 self.highlights.push((start..end, override_style));
644 }
645 }
646 }
647
648 fn highlighted_chunks<'a>(
649 range: Range<usize>,
650 snapshot: &'a text::BufferSnapshot,
651 syntax_snapshot: &'a SyntaxSnapshot,
652 ) -> BufferChunks<'a> {
653 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
654 grammar.highlights_query.as_ref()
655 });
656
657 let highlight_maps = captures
658 .grammars()
659 .iter()
660 .map(|grammar| grammar.highlight_map())
661 .collect();
662
663 BufferChunks::new(
664 snapshot.as_rope(),
665 range,
666 Some((captures, highlight_maps)),
667 false,
668 None,
669 )
670 }
671}
672
673#[derive(Clone)]
674pub struct EditPreview {
675 old_snapshot: text::BufferSnapshot,
676 applied_edits_snapshot: text::BufferSnapshot,
677 syntax_snapshot: SyntaxSnapshot,
678}
679
680impl EditPreview {
681 pub fn highlight_edits(
682 &self,
683 current_snapshot: &BufferSnapshot,
684 edits: &[(Range<Anchor>, String)],
685 include_deletions: bool,
686 cx: &App,
687 ) -> HighlightedText {
688 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
689 return HighlightedText::default();
690 };
691
692 let mut highlighted_text = HighlightedTextBuilder::default();
693
694 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
695
696 let insertion_highlight_style = HighlightStyle {
697 background_color: Some(cx.theme().status().created_background),
698 ..Default::default()
699 };
700 let deletion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().deleted_background),
702 ..Default::default()
703 };
704 let syntax_theme = cx.theme().syntax();
705
706 for (range, edit_text) in edits {
707 let edit_new_end_in_preview_snapshot = range
708 .end
709 .bias_right(&self.old_snapshot)
710 .to_offset(&self.applied_edits_snapshot);
711 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
712
713 let unchanged_range_in_preview_snapshot =
714 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
715 if !unchanged_range_in_preview_snapshot.is_empty() {
716 highlighted_text.add_text_from_buffer_range(
717 unchanged_range_in_preview_snapshot,
718 &self.applied_edits_snapshot,
719 &self.syntax_snapshot,
720 None,
721 &syntax_theme,
722 );
723 }
724
725 let range_in_current_snapshot = range.to_offset(current_snapshot);
726 if include_deletions && !range_in_current_snapshot.is_empty() {
727 highlighted_text.add_text_from_buffer_range(
728 range_in_current_snapshot,
729 &current_snapshot.text,
730 &current_snapshot.syntax,
731 Some(deletion_highlight_style),
732 &syntax_theme,
733 );
734 }
735
736 if !edit_text.is_empty() {
737 highlighted_text.add_text_from_buffer_range(
738 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
739 &self.applied_edits_snapshot,
740 &self.syntax_snapshot,
741 Some(insertion_highlight_style),
742 &syntax_theme,
743 );
744 }
745
746 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
747 }
748
749 highlighted_text.add_text_from_buffer_range(
750 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
751 &self.applied_edits_snapshot,
752 &self.syntax_snapshot,
753 None,
754 &syntax_theme,
755 );
756
757 highlighted_text.build()
758 }
759
760 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
761 let (first, _) = edits.first()?;
762 let (last, _) = edits.last()?;
763
764 let start = first
765 .start
766 .bias_left(&self.old_snapshot)
767 .to_point(&self.applied_edits_snapshot);
768 let end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
774 let range = Point::new(start.row, 0)
775 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
776
777 Some(range.to_offset(&self.applied_edits_snapshot))
778 }
779}
780
781#[derive(Clone, Debug, PartialEq, Eq)]
782pub struct BracketMatch {
783 pub open_range: Range<usize>,
784 pub close_range: Range<usize>,
785 pub newline_only: bool,
786}
787
788impl Buffer {
789 /// Create a new buffer with the given base text.
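///
/// A minimal usage sketch (assumes an entity context such as the closure
/// passed to `cx.new`, as used elsewhere in this file):
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
/// ```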
790 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
791 Self::build(
792 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
793 None,
794 Capability::ReadWrite,
795 )
796 }
797
798 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
799 pub fn local_normalized(
800 base_text_normalized: Rope,
801 line_ending: LineEnding,
802 cx: &Context<Self>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new_normalized(
806 0,
807 cx.entity_id().as_non_zero_u64().into(),
808 line_ending,
809 base_text_normalized,
810 ),
811 None,
812 Capability::ReadWrite,
813 )
814 }
815
816 /// Create a new buffer that is a replica of a remote buffer.
817 pub fn remote(
818 remote_id: BufferId,
819 replica_id: ReplicaId,
820 capability: Capability,
821 base_text: impl Into<String>,
822 ) -> Self {
823 Self::build(
824 TextBuffer::new(replica_id, remote_id, base_text.into()),
825 None,
826 capability,
827 )
828 }
829
830 /// Create a new buffer that is a replica of a remote buffer, populating its
831 /// state from the given protobuf message.
832 pub fn from_proto(
833 replica_id: ReplicaId,
834 capability: Capability,
835 message: proto::BufferState,
836 file: Option<Arc<dyn File>>,
837 ) -> Result<Self> {
838 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
839 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
840 let mut this = Self::build(buffer, file, capability);
841 this.text.set_line_ending(proto::deserialize_line_ending(
842 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
843 ));
844 this.saved_version = proto::deserialize_version(&message.saved_version);
845 this.saved_mtime = message.saved_mtime.map(|time| time.into());
846 Ok(this)
847 }
848
849 /// Serialize the buffer's state to a protobuf message.
850 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
851 proto::BufferState {
852 id: self.remote_id().into(),
853 file: self.file.as_ref().map(|f| f.to_proto(cx)),
854 base_text: self.base_text().to_string(),
855 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
856 saved_version: proto::serialize_version(&self.saved_version),
857 saved_mtime: self.saved_mtime.map(|time| time.into()),
858 }
859 }
860
861 /// Serialize as protobufs all of the changes to the buffer since the given version.
862 pub fn serialize_ops(
863 &self,
864 since: Option<clock::Global>,
865 cx: &App,
866 ) -> Task<Vec<proto::Operation>> {
867 let mut operations = Vec::new();
868 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
869
870 operations.extend(self.remote_selections.iter().map(|(_, set)| {
871 proto::serialize_operation(&Operation::UpdateSelections {
872 selections: set.selections.clone(),
873 lamport_timestamp: set.lamport_timestamp,
874 line_mode: set.line_mode,
875 cursor_shape: set.cursor_shape,
876 })
877 }));
878
879 for (server_id, diagnostics) in &self.diagnostics {
880 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
881 lamport_timestamp: self.diagnostics_timestamp,
882 server_id: *server_id,
883 diagnostics: diagnostics.iter().cloned().collect(),
884 }));
885 }
886
887 for (server_id, completions) in &self.completion_triggers_per_language_server {
888 operations.push(proto::serialize_operation(
889 &Operation::UpdateCompletionTriggers {
890 triggers: completions.iter().cloned().collect(),
891 lamport_timestamp: self.completion_triggers_timestamp,
892 server_id: *server_id,
893 },
894 ));
895 }
896
897 let text_operations = self.text.operations().clone();
898 cx.background_spawn(async move {
899 let since = since.unwrap_or_default();
900 operations.extend(
901 text_operations
902 .iter()
903 .filter(|(_, op)| !since.observed(op.timestamp()))
904 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
905 );
906 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
907 operations
908 })
909 }
910
911 /// Assign a language to the buffer, returning the buffer.
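///
/// A usage sketch (assumes `language: Arc<Language>` has already been loaded;
/// the source text is arbitrary):
/// ```ignore
/// let buffer =
///     cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(language, cx));
/// ```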
912 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
913 self.set_language(Some(language), cx);
914 self
915 }
916
917 /// Returns the [`Capability`] of this buffer.
918 pub fn capability(&self) -> Capability {
919 self.capability
920 }
921
922 /// Whether this buffer can only be read.
923 pub fn read_only(&self) -> bool {
924 self.capability == Capability::ReadOnly
925 }
926
927 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
928 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
929 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
930 let snapshot = buffer.snapshot();
931 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
932 Self {
933 saved_mtime,
934 saved_version: buffer.version(),
935 preview_version: buffer.version(),
936 reload_task: None,
937 transaction_depth: 0,
938 was_dirty_before_starting_transaction: None,
939 has_unsaved_edits: Cell::new((buffer.version(), false)),
940 text: buffer,
941 branch_state: None,
942 file,
943 capability,
944 syntax_map,
945 reparse: None,
946 non_text_state_update_count: 0,
947 sync_parse_timeout: Duration::from_millis(1),
948 parse_status: async_watch::channel(ParseStatus::Idle),
949 autoindent_requests: Default::default(),
950 pending_autoindent: Default::default(),
951 language: None,
952 remote_selections: Default::default(),
953 diagnostics: Default::default(),
954 diagnostics_timestamp: Default::default(),
955 completion_triggers: Default::default(),
956 completion_triggers_per_language_server: Default::default(),
957 completion_triggers_timestamp: Default::default(),
958 deferred_ops: OperationQueue::new(),
959 has_conflict: false,
960 pull_diagnostics_result_id: None,
961 change_bits: Default::default(),
962 _subscriptions: Vec::new(),
963 }
964 }
965
966 pub fn build_snapshot(
967 text: Rope,
968 language: Option<Arc<Language>>,
969 language_registry: Option<Arc<LanguageRegistry>>,
970 cx: &mut App,
971 ) -> impl Future<Output = BufferSnapshot> + use<> {
972 let entity_id = cx.reserve_entity::<Self>().entity_id();
973 let buffer_id = entity_id.as_non_zero_u64().into();
974 async move {
975 let text =
976 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
977 let mut syntax = SyntaxMap::new(&text).snapshot();
978 if let Some(language) = language.clone() {
979 let text = text.clone();
980 let language = language.clone();
981 let language_registry = language_registry.clone();
982 syntax.reparse(&text, language_registry, language);
983 }
984 BufferSnapshot {
985 text,
986 syntax,
987 file: None,
988 diagnostics: Default::default(),
989 remote_selections: Default::default(),
990 language,
991 non_text_state_update_count: 0,
992 }
993 }
994 }
995
996 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
997 let entity_id = cx.reserve_entity::<Self>().entity_id();
998 let buffer_id = entity_id.as_non_zero_u64().into();
999 let text =
1000 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1001 let syntax = SyntaxMap::new(&text).snapshot();
1002 BufferSnapshot {
1003 text,
1004 syntax,
1005 file: None,
1006 diagnostics: Default::default(),
1007 remote_selections: Default::default(),
1008 language: None,
1009 non_text_state_update_count: 0,
1010 }
1011 }
1012
1013 #[cfg(any(test, feature = "test-support"))]
1014 pub fn build_snapshot_sync(
1015 text: Rope,
1016 language: Option<Arc<Language>>,
1017 language_registry: Option<Arc<LanguageRegistry>>,
1018 cx: &mut App,
1019 ) -> BufferSnapshot {
1020 let entity_id = cx.reserve_entity::<Self>().entity_id();
1021 let buffer_id = entity_id.as_non_zero_u64().into();
1022 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1023 let mut syntax = SyntaxMap::new(&text).snapshot();
1024 if let Some(language) = language.clone() {
1025 let text = text.clone();
1026 let language = language.clone();
1027 let language_registry = language_registry.clone();
1028 syntax.reparse(&text, language_registry, language);
1029 }
1030 BufferSnapshot {
1031 text,
1032 syntax,
1033 file: None,
1034 diagnostics: Default::default(),
1035 remote_selections: Default::default(),
1036 language,
1037 non_text_state_update_count: 0,
1038 }
1039 }
1040
1041 /// Retrieve a snapshot of the buffer's current state. This is computationally
1042 /// cheap, and allows reading from the buffer on a background thread.
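///
/// A sketch of reading from a snapshot on a background thread (uses
/// `cx.background_spawn`, as this file does elsewhere; row 0 is arbitrary):
/// ```ignore
/// let snapshot = buffer.read(cx).snapshot();
/// cx.background_spawn(async move {
///     let _indent = snapshot.indent_size_for_line(0);
///     let _len = snapshot.line_len(0);
/// })
/// .detach();
/// ```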
1043 pub fn snapshot(&self) -> BufferSnapshot {
1044 let text = self.text.snapshot();
1045 let mut syntax_map = self.syntax_map.lock();
1046 syntax_map.interpolate(&text);
1047 let syntax = syntax_map.snapshot();
1048
1049 BufferSnapshot {
1050 text,
1051 syntax,
1052 file: self.file.clone(),
1053 remote_selections: self.remote_selections.clone(),
1054 diagnostics: self.diagnostics.clone(),
1055 language: self.language.clone(),
1056 non_text_state_update_count: self.non_text_state_update_count,
1057 }
1058 }
1059
1060 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1061 let this = cx.entity();
1062 cx.new(|cx| {
1063 let mut branch = Self {
1064 branch_state: Some(BufferBranchState {
1065 base_buffer: this.clone(),
1066 merged_operations: Default::default(),
1067 }),
1068 language: self.language.clone(),
1069 has_conflict: self.has_conflict,
1070 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1071 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1072 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1073 };
1074 if let Some(language_registry) = self.language_registry() {
1075 branch.set_language_registry(language_registry);
1076 }
1077
1078 // Reparse the branch buffer so that we get syntax highlighting immediately.
1079 branch.reparse(cx);
1080
1081 branch
1082 })
1083 }
1084
1085 pub fn preview_edits(
1086 &self,
1087 edits: Arc<[(Range<Anchor>, String)]>,
1088 cx: &App,
1089 ) -> Task<EditPreview> {
1090 let registry = self.language_registry();
1091 let language = self.language().cloned();
1092 let old_snapshot = self.text.snapshot();
1093 let mut branch_buffer = self.text.branch();
1094 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1095 cx.background_spawn(async move {
1096 if !edits.is_empty() {
1097 if let Some(language) = language.clone() {
1098 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1099 }
1100
1101 branch_buffer.edit(edits.iter().cloned());
1102 let snapshot = branch_buffer.snapshot();
1103 syntax_snapshot.interpolate(&snapshot);
1104
1105 if let Some(language) = language {
1106 syntax_snapshot.reparse(&snapshot, registry, language);
1107 }
1108 }
1109 EditPreview {
1110 old_snapshot,
1111 applied_edits_snapshot: branch_buffer.snapshot(),
1112 syntax_snapshot,
1113 }
1114 })
1115 }
1116
1117 /// Applies all of the changes in this buffer that intersect any of the
1118 /// given `ranges` to its base buffer.
1119 ///
1120 /// If `ranges` is empty, then all changes will be applied. This buffer must
1121 /// be a branch buffer to call this method.
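///
/// A usage sketch (the edit shown is hypothetical; assumes `branch` was
/// created with [`Buffer::branch`]):
/// ```ignore
/// branch.update(cx, |branch, cx| {
///     branch.edit([(0..0, "// new line\n")], None, cx);
///     // An empty range list applies every branch change to the base buffer.
///     branch.merge_into_base(Vec::new(), cx);
/// });
/// ```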
1122 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1123 let Some(base_buffer) = self.base_buffer() else {
1124 debug_panic!("not a branch buffer");
1125 return;
1126 };
1127
1128 let mut ranges = if ranges.is_empty() {
1129 &[0..usize::MAX]
1130 } else {
1131 ranges.as_slice()
1132 }
1133 .into_iter()
1134 .peekable();
1135
1136 let mut edits = Vec::new();
1137 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1138 let mut is_included = false;
1139 while let Some(range) = ranges.peek() {
1140 if range.end < edit.new.start {
1141 ranges.next().unwrap();
1142 } else {
1143 if range.start <= edit.new.end {
1144 is_included = true;
1145 }
1146 break;
1147 }
1148 }
1149
1150 if is_included {
1151 edits.push((
1152 edit.old.clone(),
1153 self.text_for_range(edit.new.clone()).collect::<String>(),
1154 ));
1155 }
1156 }
1157
1158 let operation = base_buffer.update(cx, |base_buffer, cx| {
1159 // cx.emit(BufferEvent::DiffBaseChanged);
1160 base_buffer.edit(edits, None, cx)
1161 });
1162
1163 if let Some(operation) = operation {
1164 if let Some(BufferBranchState {
1165 merged_operations, ..
1166 }) = &mut self.branch_state
1167 {
1168 merged_operations.push(operation);
1169 }
1170 }
1171 }
1172
1173 fn on_base_buffer_event(
1174 &mut self,
1175 _: Entity<Buffer>,
1176 event: &BufferEvent,
1177 cx: &mut Context<Self>,
1178 ) {
1179 let BufferEvent::Operation { operation, .. } = event else {
1180 return;
1181 };
1182 let Some(BufferBranchState {
1183 merged_operations, ..
1184 }) = &mut self.branch_state
1185 else {
1186 return;
1187 };
1188
1189 let mut operation_to_undo = None;
1190 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1191 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1192 merged_operations.remove(ix);
1193 operation_to_undo = Some(operation.timestamp);
1194 }
1195 }
1196
1197 self.apply_ops([operation.clone()], cx);
1198
1199 if let Some(timestamp) = operation_to_undo {
1200 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1201 self.undo_operations(counts, cx);
1202 }
1203 }
1204
1205 #[cfg(test)]
1206 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1207 &self.text
1208 }
1209
1210 /// Retrieve a snapshot of the buffer's raw text, without any
1211 /// language-related state like the syntax tree or diagnostics.
1212 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1213 self.text.snapshot()
1214 }
1215
1216 /// The file associated with the buffer, if any.
1217 pub fn file(&self) -> Option<&Arc<dyn File>> {
1218 self.file.as_ref()
1219 }
1220
1221 /// The version of the buffer that was last saved or reloaded from disk.
1222 pub fn saved_version(&self) -> &clock::Global {
1223 &self.saved_version
1224 }
1225
1226 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1227 pub fn saved_mtime(&self) -> Option<MTime> {
1228 self.saved_mtime
1229 }
1230
1231 /// Assign a language to the buffer.
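///
/// A usage sketch (assumes `rust_language: Arc<Language>` was resolved from a
/// [`LanguageRegistry`]):
/// ```ignore
/// buffer.update(cx, |buffer, cx| buffer.set_language(Some(rust_language), cx));
/// ```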
1232 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1233 self.non_text_state_update_count += 1;
1234 self.syntax_map.lock().clear(&self.text);
1235 self.language = language;
1236 self.was_changed();
1237 self.reparse(cx);
1238 cx.emit(BufferEvent::LanguageChanged);
1239 }
1240
1241 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1242 /// other languages if parts of the buffer are written in different languages.
1243 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1244 self.syntax_map
1245 .lock()
1246 .set_language_registry(language_registry);
1247 }
1248
1249 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1250 self.syntax_map.lock().language_registry()
1251 }
1252
1253 /// Assign the buffer a new [`Capability`].
1254 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1255 self.capability = capability;
1256 cx.emit(BufferEvent::CapabilityChanged)
1257 }
1258
1259 /// This method is called to signal that the buffer has been saved.
1260 pub fn did_save(
1261 &mut self,
1262 version: clock::Global,
1263 mtime: Option<MTime>,
1264 cx: &mut Context<Self>,
1265 ) {
1266 self.saved_version = version;
1267 self.has_unsaved_edits
1268 .set((self.saved_version().clone(), false));
1269 self.has_conflict = false;
1270 self.saved_mtime = mtime;
1271 self.was_changed();
1272 cx.emit(BufferEvent::Saved);
1273 cx.notify();
1274 }
1275
1276 /// This method is called to signal that the buffer has been discarded.
1277 pub fn discarded(&self, cx: &mut Context<Self>) {
1278 cx.emit(BufferEvent::Discarded);
1279 cx.notify();
1280 }
1281
1282 /// Reloads the contents of the buffer from disk.
1283 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1284 let (tx, rx) = futures::channel::oneshot::channel();
1285 let prev_version = self.text.version();
1286 self.reload_task = Some(cx.spawn(async move |this, cx| {
1287 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1288 let file = this.file.as_ref()?.as_local()?;
1289
1290 Some((file.disk_state().mtime(), file.load(cx)))
1291 })?
1292 else {
1293 return Ok(());
1294 };
1295
1296 let new_text = new_text.await?;
1297 let diff = this
1298 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1299 .await;
1300 this.update(cx, |this, cx| {
1301 if this.version() == diff.base_version {
1302 this.finalize_last_transaction();
1303 this.apply_diff(diff, cx);
1304 tx.send(this.finalize_last_transaction().cloned()).ok();
1305 this.has_conflict = false;
1306 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1307 } else {
1308 if !diff.edits.is_empty()
1309 || this
1310 .edits_since::<usize>(&diff.base_version)
1311 .next()
1312 .is_some()
1313 {
1314 this.has_conflict = true;
1315 }
1316
1317 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1318 }
1319
1320 this.reload_task.take();
1321 })
1322 }));
1323 rx
1324 }
1325
1326 /// This method is called to signal that the buffer has been reloaded.
1327 pub fn did_reload(
1328 &mut self,
1329 version: clock::Global,
1330 line_ending: LineEnding,
1331 mtime: Option<MTime>,
1332 cx: &mut Context<Self>,
1333 ) {
1334 self.saved_version = version;
1335 self.has_unsaved_edits
1336 .set((self.saved_version.clone(), false));
1337 self.text.set_line_ending(line_ending);
1338 self.saved_mtime = mtime;
1339 cx.emit(BufferEvent::Reloaded);
1340 cx.notify();
1341 }
1342
1343 /// Updates the [`File`] backing this buffer. This should be called when
1344 /// the file has changed or has been deleted.
1345 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1346 let was_dirty = self.is_dirty();
1347 let mut file_changed = false;
1348
1349 if let Some(old_file) = self.file.as_ref() {
1350 if new_file.path() != old_file.path() {
1351 file_changed = true;
1352 }
1353
1354 let old_state = old_file.disk_state();
1355 let new_state = new_file.disk_state();
1356 if old_state != new_state {
1357 file_changed = true;
1358 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1359 cx.emit(BufferEvent::ReloadNeeded)
1360 }
1361 }
1362 } else {
1363 file_changed = true;
1364 };
1365
1366 self.file = Some(new_file);
1367 if file_changed {
1368 self.was_changed();
1369 self.non_text_state_update_count += 1;
1370 if was_dirty != self.is_dirty() {
1371 cx.emit(BufferEvent::DirtyChanged);
1372 }
1373 cx.emit(BufferEvent::FileHandleChanged);
1374 cx.notify();
1375 }
1376 }
1377
1378 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1379 Some(self.branch_state.as_ref()?.base_buffer.clone())
1380 }
1381
1382 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1383 pub fn language(&self) -> Option<&Arc<Language>> {
1384 self.language.as_ref()
1385 }
1386
1387 /// Returns the [`Language`] at the given location.
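///
/// A usage sketch (the offset is arbitrary; in a buffer with injected
/// languages the result may differ from [`Buffer::language`]):
/// ```ignore
/// let name = buffer.read(cx).language_at(42).map(|language| language.name());
/// ```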
1388 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1389 let offset = position.to_offset(self);
1390 let mut is_first = true;
1391 let start_anchor = self.anchor_before(offset);
1392 let end_anchor = self.anchor_after(offset);
1393 self.syntax_map
1394 .lock()
1395 .layers_for_range(offset..offset, &self.text, false)
1396 .filter(|layer| {
1397 if is_first {
1398 is_first = false;
1399 return true;
1400 }
1401 let any_sub_ranges_contain_range = layer
1402 .included_sub_ranges
1403 .map(|sub_ranges| {
1404 sub_ranges.iter().any(|sub_range| {
1405 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1406 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1407 !is_before_start && !is_after_end
1408 })
1409 })
1410 .unwrap_or(true);
1411 let result = any_sub_ranges_contain_range;
1412 return result;
1413 })
1414 .last()
1415 .map(|info| info.language.clone())
1416 .or_else(|| self.language.clone())
1417 }
1418
1419 /// Returns each [`Language`] for the active syntax layers at the given location.
1420 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1421 let offset = position.to_offset(self);
1422 let mut languages: Vec<Arc<Language>> = self
1423 .syntax_map
1424 .lock()
1425 .layers_for_range(offset..offset, &self.text, false)
1426 .map(|info| info.language.clone())
1427 .collect();
1428
1429 if languages.is_empty() {
1430 if let Some(buffer_language) = self.language() {
1431 languages.push(buffer_language.clone());
1432 }
1433 }
1434
1435 languages
1436 }
1437
1438 /// An integer version number that accounts for all updates besides
1439 /// the buffer's text itself (which is versioned via a version vector).
1440 pub fn non_text_state_update_count(&self) -> usize {
1441 self.non_text_state_update_count
1442 }
1443
1444 /// Whether the buffer is being parsed in the background.
1445 #[cfg(any(test, feature = "test-support"))]
1446 pub fn is_parsing(&self) -> bool {
1447 self.reparse.is_some()
1448 }
1449
1450 /// Indicates whether the buffer contains any regions that may be
1451 /// written in a language that hasn't been loaded yet.
1452 pub fn contains_unknown_injections(&self) -> bool {
1453 self.syntax_map.lock().contains_unknown_injections()
1454 }
1455
1456 #[cfg(test)]
1457 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1458 self.sync_parse_timeout = timeout;
1459 }
1460
1461 /// Called after an edit to synchronize the buffer's main parse tree with
1462 /// the buffer's new underlying state.
1463 ///
1464 /// Locks the syntax map and interpolates the edits since the last reparse
1465 /// into the foreground syntax tree.
1466 ///
1467 /// Then takes a stable snapshot of the syntax map before unlocking it.
1468 /// The snapshot with the interpolated edits is sent to a background thread,
1469 /// where we ask Tree-sitter to perform an incremental parse.
1470 ///
1471 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1472 /// waiting on the parse to complete. As soon as it completes, we proceed
1473 /// synchronously, unless a 1ms timeout elapses.
1474 ///
1475 /// If we time out waiting on the parse, we spawn a second task that waits
1476 /// until the parse does complete, and we return with the interpolated tree still
1477 /// in the foreground. When the background parse completes, it calls back into
1478 /// the main thread and assigns the final parse state.
1479 ///
1480 /// If the buffer or grammar changed since the start of the background parse,
1481 /// initiate an additional reparse recursively. To avoid concurrent parses
1482 /// for the same buffer, we only initiate a new parse if we are not already
1483 /// parsing in the background.
1484 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1485 if self.reparse.is_some() {
1486 return;
1487 }
1488 let language = if let Some(language) = self.language.clone() {
1489 language
1490 } else {
1491 return;
1492 };
1493
1494 let text = self.text_snapshot();
1495 let parsed_version = self.version();
1496
1497 let mut syntax_map = self.syntax_map.lock();
1498 syntax_map.interpolate(&text);
1499 let language_registry = syntax_map.language_registry();
1500 let mut syntax_snapshot = syntax_map.snapshot();
1501 drop(syntax_map);
1502
1503 let parse_task = cx.background_spawn({
1504 let language = language.clone();
1505 let language_registry = language_registry.clone();
1506 async move {
1507 syntax_snapshot.reparse(&text, language_registry, language);
1508 syntax_snapshot
1509 }
1510 });
1511
1512 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1513 match cx
1514 .background_executor()
1515 .block_with_timeout(self.sync_parse_timeout, parse_task)
1516 {
1517 Ok(new_syntax_snapshot) => {
1518 self.did_finish_parsing(new_syntax_snapshot, cx);
1519 self.reparse = None;
1520 }
1521 Err(parse_task) => {
1522 self.reparse = Some(cx.spawn(async move |this, cx| {
1523 let new_syntax_map = parse_task.await;
1524 this.update(cx, move |this, cx| {
1525 let grammar_changed =
1526 this.language.as_ref().map_or(true, |current_language| {
1527 !Arc::ptr_eq(&language, current_language)
1528 });
1529 let language_registry_changed = new_syntax_map
1530 .contains_unknown_injections()
1531 && language_registry.map_or(false, |registry| {
1532 registry.version() != new_syntax_map.language_registry_version()
1533 });
1534 let parse_again = language_registry_changed
1535 || grammar_changed
1536 || this.version.changed_since(&parsed_version);
1537 this.did_finish_parsing(new_syntax_map, cx);
1538 this.reparse = None;
1539 if parse_again {
1540 this.reparse(cx);
1541 }
1542 })
1543 .ok();
1544 }));
1545 }
1546 }
1547 }
1548
1549 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1550 self.was_changed();
1551 self.non_text_state_update_count += 1;
1552 self.syntax_map.lock().did_parse(syntax_snapshot);
1553 self.request_autoindent(cx);
1554 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1555 cx.emit(BufferEvent::Reparsed);
1556 cx.notify();
1557 }
1558
1559 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1560 self.parse_status.1.clone()
1561 }
1562
1563 /// Assign to the buffer a set of diagnostics created by a given language server.
1564 pub fn update_diagnostics(
1565 &mut self,
1566 server_id: LanguageServerId,
1567 diagnostics: DiagnosticSet,
1568 cx: &mut Context<Self>,
1569 ) {
1570 let lamport_timestamp = self.text.lamport_clock.tick();
1571 let op = Operation::UpdateDiagnostics {
1572 server_id,
1573 diagnostics: diagnostics.iter().cloned().collect(),
1574 lamport_timestamp,
1575 };
1576 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1577 self.send_operation(op, true, cx);
1578 }
1579
1580 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1581 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1582 return None;
1583 };
1584 Some(&self.diagnostics[idx].1)
1585 }
1586
1587 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1588 if let Some(indent_sizes) = self.compute_autoindents() {
1589 let indent_sizes = cx.background_spawn(indent_sizes);
1590 match cx
1591 .background_executor()
1592 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1593 {
1594 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1595 Err(indent_sizes) => {
1596 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1597 let indent_sizes = indent_sizes.await;
1598 this.update(cx, |this, cx| {
1599 this.apply_autoindents(indent_sizes, cx);
1600 })
1601 .ok();
1602 }));
1603 }
1604 }
1605 } else {
1606 self.autoindent_requests.clear();
1607 }
1608 }
1609
1610 fn compute_autoindents(
1611 &self,
1612 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1613 let max_rows_between_yields = 100;
1614 let snapshot = self.snapshot();
1615 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1616 return None;
1617 }
1618
1619 let autoindent_requests = self.autoindent_requests.clone();
1620 Some(async move {
1621 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1622 for request in autoindent_requests {
1623 // Resolve each edited range to its row in the current buffer and in the
1624 // buffer before this batch of edits.
1625 let mut row_ranges = Vec::new();
1626 let mut old_to_new_rows = BTreeMap::new();
1627 let mut language_indent_sizes_by_new_row = Vec::new();
1628 for entry in &request.entries {
1629 let position = entry.range.start;
1630 let new_row = position.to_point(&snapshot).row;
1631 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1632 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1633
1634 if !entry.first_line_is_new {
1635 let old_row = position.to_point(&request.before_edit).row;
1636 old_to_new_rows.insert(old_row, new_row);
1637 }
1638 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1639 }
1640
1641 // Build a map containing the suggested indentation for each of the edited lines
1642 // with respect to the state of the buffer before these edits. This map is keyed
1643 // by the rows for these lines in the current state of the buffer.
1644 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1645 let old_edited_ranges =
1646 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1647 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1648 let mut language_indent_size = IndentSize::default();
1649 for old_edited_range in old_edited_ranges {
1650 let suggestions = request
1651 .before_edit
1652 .suggest_autoindents(old_edited_range.clone())
1653 .into_iter()
1654 .flatten();
1655 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1656 if let Some(suggestion) = suggestion {
1657 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1658
1659 // Find the indent size based on the language for this row.
1660 while let Some((row, size)) = language_indent_sizes.peek() {
1661 if *row > new_row {
1662 break;
1663 }
1664 language_indent_size = *size;
1665 language_indent_sizes.next();
1666 }
1667
1668 let suggested_indent = old_to_new_rows
1669 .get(&suggestion.basis_row)
1670 .and_then(|from_row| {
1671 Some(old_suggestions.get(from_row).copied()?.0)
1672 })
1673 .unwrap_or_else(|| {
1674 request
1675 .before_edit
1676 .indent_size_for_line(suggestion.basis_row)
1677 })
1678 .with_delta(suggestion.delta, language_indent_size);
1679 old_suggestions
1680 .insert(new_row, (suggested_indent, suggestion.within_error));
1681 }
1682 }
1683 yield_now().await;
1684 }
1685
1686 // Compute new suggestions for each line, but only include them in the result
1687 // if they differ from the old suggestion for that line.
1688 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1689 let mut language_indent_size = IndentSize::default();
1690 for (row_range, original_indent_column) in row_ranges {
1691 let new_edited_row_range = if request.is_block_mode {
1692 row_range.start..row_range.start + 1
1693 } else {
1694 row_range.clone()
1695 };
1696
1697 let suggestions = snapshot
1698 .suggest_autoindents(new_edited_row_range.clone())
1699 .into_iter()
1700 .flatten();
1701 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1702 if let Some(suggestion) = suggestion {
1703 // Find the indent size based on the language for this row.
1704 while let Some((row, size)) = language_indent_sizes.peek() {
1705 if *row > new_row {
1706 break;
1707 }
1708 language_indent_size = *size;
1709 language_indent_sizes.next();
1710 }
1711
1712 let suggested_indent = indent_sizes
1713 .get(&suggestion.basis_row)
1714 .copied()
1715 .map(|e| e.0)
1716 .unwrap_or_else(|| {
1717 snapshot.indent_size_for_line(suggestion.basis_row)
1718 })
1719 .with_delta(suggestion.delta, language_indent_size);
1720
1721 if old_suggestions.get(&new_row).map_or(
1722 true,
1723 |(old_indentation, was_within_error)| {
1724 suggested_indent != *old_indentation
1725 && (!suggestion.within_error || *was_within_error)
1726 },
1727 ) {
1728 indent_sizes.insert(
1729 new_row,
1730 (suggested_indent, request.ignore_empty_lines),
1731 );
1732 }
1733 }
1734 }
1735
1736 if let (true, Some(original_indent_column)) =
1737 (request.is_block_mode, original_indent_column)
1738 {
1739 let new_indent =
1740 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1741 *indent
1742 } else {
1743 snapshot.indent_size_for_line(row_range.start)
1744 };
1745 let delta = new_indent.len as i64 - original_indent_column as i64;
1746 if delta != 0 {
1747 for row in row_range.skip(1) {
1748 indent_sizes.entry(row).or_insert_with(|| {
1749 let mut size = snapshot.indent_size_for_line(row);
1750 if size.kind == new_indent.kind {
1751 match delta.cmp(&0) {
1752 Ordering::Greater => size.len += delta as u32,
1753 Ordering::Less => {
1754 size.len = size.len.saturating_sub(-delta as u32)
1755 }
1756 Ordering::Equal => {}
1757 }
1758 }
1759 (size, request.ignore_empty_lines)
1760 });
1761 }
1762 }
1763 }
1764
1765 yield_now().await;
1766 }
1767 }
1768
1769 indent_sizes
1770 .into_iter()
1771 .filter_map(|(row, (indent, ignore_empty_lines))| {
1772 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1773 None
1774 } else {
1775 Some((row, indent))
1776 }
1777 })
1778 .collect()
1779 })
1780 }
1781
1782 fn apply_autoindents(
1783 &mut self,
1784 indent_sizes: BTreeMap<u32, IndentSize>,
1785 cx: &mut Context<Self>,
1786 ) {
1787 self.autoindent_requests.clear();
1788
1789 let edits: Vec<_> = indent_sizes
1790 .into_iter()
1791 .filter_map(|(row, indent_size)| {
1792 let current_size = indent_size_for_line(self, row);
1793 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1794 })
1795 .collect();
1796
1797 let preserve_preview = self.preserve_preview();
1798 self.edit(edits, None, cx);
1799 if preserve_preview {
1800 self.refresh_preview();
1801 }
1802 }
1803
1804 /// Create a minimal edit that will cause the given row to be indented
1805 /// with the given size. After applying this edit, the length of the line
1806 /// will always be at least `new_size.len`.
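    ///
    /// A minimal sketch (not compiled as a doctest); the row and indent sizes are
    /// illustrative:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at the line start.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     2,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // => Some((Point::new(2, 0)..Point::new(2, 0), "  ".to_string()))
    /// ```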
1807 pub fn edit_for_indent_size_adjustment(
1808 row: u32,
1809 current_size: IndentSize,
1810 new_size: IndentSize,
1811 ) -> Option<(Range<Point>, String)> {
1812 if new_size.kind == current_size.kind {
1813             match new_size.len.cmp(&current_size.len) {
1814 Ordering::Greater => {
1815 let point = Point::new(row, 0);
1816 Some((
1817 point..point,
1818 iter::repeat(new_size.char())
1819 .take((new_size.len - current_size.len) as usize)
1820 .collect::<String>(),
1821 ))
1822 }
1823
1824 Ordering::Less => Some((
1825 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1826 String::new(),
1827 )),
1828
1829 Ordering::Equal => None,
1830 }
1831 } else {
1832 Some((
1833 Point::new(row, 0)..Point::new(row, current_size.len),
1834 iter::repeat(new_size.char())
1835 .take(new_size.len as usize)
1836 .collect::<String>(),
1837 ))
1838 }
1839 }
1840
1841 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1842 /// and the given new text.
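    ///
    /// A minimal sketch (not compiled as a doctest); awaiting the returned task and
    /// the follow-up call to [`Buffer::apply_diff`] are assumed to happen in an
    /// async context with access to the buffer:
    ///
    /// ```ignore
    /// let diff_task: Task<Diff> = buffer.diff(new_contents, cx);
    /// // ... await `diff_task`, then on the foreground:
    /// // buffer.apply_diff(diff, cx);
    /// ```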
1843 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1844 let old_text = self.as_rope().clone();
1845 let base_version = self.version();
1846 cx.background_executor()
1847 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1848 let old_text = old_text.to_string();
1849 let line_ending = LineEnding::detect(&new_text);
1850 LineEnding::normalize(&mut new_text);
1851 let edits = text_diff(&old_text, &new_text);
1852 Diff {
1853 base_version,
1854 line_ending,
1855 edits,
1856 }
1857 })
1858 }
1859
1860 /// Spawns a background task that searches the buffer for any whitespace
1861     /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1862 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1863 let old_text = self.as_rope().clone();
1864 let line_ending = self.line_ending();
1865 let base_version = self.version();
1866 cx.background_spawn(async move {
1867 let ranges = trailing_whitespace_ranges(&old_text);
1868 let empty = Arc::<str>::from("");
1869 Diff {
1870 base_version,
1871 line_ending,
1872 edits: ranges
1873 .into_iter()
1874 .map(|range| (range, empty.clone()))
1875 .collect(),
1876 }
1877 })
1878 }
1879
1880 /// Ensures that the buffer ends with a single newline character, and
1881 /// no other whitespace.
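    ///
    /// A minimal sketch (not compiled as a doctest); `text()` comes from the
    /// underlying [`TextBuffer`]:
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}", cx);
    /// buffer.ensure_final_newline(cx);
    /// assert!(buffer.text().ends_with('\n'));
    /// ```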
1882 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1883 let len = self.len();
1884 let mut offset = len;
1885 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1886 let non_whitespace_len = chunk
1887 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1888 .len();
1889 offset -= chunk.len();
1890 offset += non_whitespace_len;
1891 if non_whitespace_len != 0 {
1892 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1893 return;
1894 }
1895 break;
1896 }
1897 }
1898 self.edit([(offset..len, "\n")], None, cx);
1899 }
1900
1901 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1902 /// calculated, then adjust the diff to account for those changes, and discard any
1903 /// parts of the diff that conflict with those changes.
1904 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1905 let snapshot = self.snapshot();
1906 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1907 let mut delta = 0;
1908 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1909 while let Some(edit_since) = edits_since.peek() {
1910 // If the edit occurs after a diff hunk, then it does not
1911 // affect that hunk.
1912 if edit_since.old.start > range.end {
1913 break;
1914 }
1915 // If the edit precedes the diff hunk, then adjust the hunk
1916 // to reflect the edit.
1917 else if edit_since.old.end < range.start {
1918 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1919 edits_since.next();
1920 }
1921 // If the edit intersects a diff hunk, then discard that hunk.
1922 else {
1923 return None;
1924 }
1925 }
1926
1927 let start = (range.start as i64 + delta) as usize;
1928 let end = (range.end as i64 + delta) as usize;
1929 Some((start..end, new_text))
1930 });
1931
1932 self.start_transaction();
1933 self.text.set_line_ending(diff.line_ending);
1934 self.edit(adjusted_edits, None, cx);
1935 self.end_transaction(cx)
1936 }
1937
1938 fn has_unsaved_edits(&self) -> bool {
1939 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1940
1941 if last_version == self.version {
1942 self.has_unsaved_edits
1943 .set((last_version, has_unsaved_edits));
1944 return has_unsaved_edits;
1945 }
1946
1947 let has_edits = self.has_edits_since(&self.saved_version);
1948 self.has_unsaved_edits
1949 .set((self.version.clone(), has_edits));
1950 has_edits
1951 }
1952
1953 /// Checks if the buffer has unsaved changes.
1954 pub fn is_dirty(&self) -> bool {
1955 if self.capability == Capability::ReadOnly {
1956 return false;
1957 }
1958 if self.has_conflict {
1959 return true;
1960 }
1961 match self.file.as_ref().map(|f| f.disk_state()) {
1962 Some(DiskState::New) | Some(DiskState::Deleted) => {
1963 !self.is_empty() && self.has_unsaved_edits()
1964 }
1965 _ => self.has_unsaved_edits(),
1966 }
1967 }
1968
1969 /// Checks if the buffer and its file have both changed since the buffer
1970 /// was last saved or reloaded.
1971 pub fn has_conflict(&self) -> bool {
1972 if self.has_conflict {
1973 return true;
1974 }
1975 let Some(file) = self.file.as_ref() else {
1976 return false;
1977 };
1978 match file.disk_state() {
1979 DiskState::New => false,
1980 DiskState::Present { mtime } => match self.saved_mtime {
1981 Some(saved_mtime) => {
1982 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1983 }
1984 None => true,
1985 },
1986 DiskState::Deleted => false,
1987 }
1988 }
1989
1990 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1991 pub fn subscribe(&mut self) -> Subscription {
1992 self.text.subscribe()
1993 }
1994
1995 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1996 ///
1997 /// This allows downstream code to check if the buffer's text has changed without
1998     /// waiting for an effect cycle, which would be required if using events.
1999 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2000 if let Err(ix) = self
2001 .change_bits
2002 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2003 {
2004 self.change_bits.insert(ix, bit);
2005 }
2006 }
2007
2008 fn was_changed(&mut self) {
2009 self.change_bits.retain(|change_bit| {
2010 change_bit.upgrade().map_or(false, |bit| {
2011 bit.replace(true);
2012 true
2013 })
2014 });
2015 }
2016
2017 /// Starts a transaction, if one is not already in-progress. When undoing or
2018 /// redoing edits, all of the edits performed within a transaction are undone
2019 /// or redone together.
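    ///
    /// A minimal sketch (not compiled as a doctest) of grouping two edits so that a
    /// single undo reverts both:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```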
2020 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2021 self.start_transaction_at(Instant::now())
2022 }
2023
2024 /// Starts a transaction, providing the current time. Subsequent transactions
2025 /// that occur within a short period of time will be grouped together. This
2026 /// is controlled by the buffer's undo grouping duration.
2027 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2028 self.transaction_depth += 1;
2029 if self.was_dirty_before_starting_transaction.is_none() {
2030 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2031 }
2032 self.text.start_transaction_at(now)
2033 }
2034
2035 /// Terminates the current transaction, if this is the outermost transaction.
2036 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2037 self.end_transaction_at(Instant::now(), cx)
2038 }
2039
2040 /// Terminates the current transaction, providing the current time. Subsequent transactions
2041 /// that occur within a short period of time will be grouped together. This
2042 /// is controlled by the buffer's undo grouping duration.
2043 pub fn end_transaction_at(
2044 &mut self,
2045 now: Instant,
2046 cx: &mut Context<Self>,
2047 ) -> Option<TransactionId> {
2048 assert!(self.transaction_depth > 0);
2049 self.transaction_depth -= 1;
2050 let was_dirty = if self.transaction_depth == 0 {
2051 self.was_dirty_before_starting_transaction.take().unwrap()
2052 } else {
2053 false
2054 };
2055 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2056 self.did_edit(&start_version, was_dirty, cx);
2057 Some(transaction_id)
2058 } else {
2059 None
2060 }
2061 }
2062
2063 /// Manually add a transaction to the buffer's undo history.
2064 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2065 self.text.push_transaction(transaction, now);
2066 }
2067
2068 /// Prevent the last transaction from being grouped with any subsequent transactions,
2069     /// even if they occur within the buffer's undo grouping duration.
2070 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2071 self.text.finalize_last_transaction()
2072 }
2073
2074 /// Manually group all changes since a given transaction.
2075 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2076 self.text.group_until_transaction(transaction_id);
2077 }
2078
2079 /// Manually remove a transaction from the buffer's undo history
2080 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2081 self.text.forget_transaction(transaction_id)
2082 }
2083
2084 /// Retrieve a transaction from the buffer's undo history
2085 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2086 self.text.get_transaction(transaction_id)
2087 }
2088
2089 /// Manually merge two transactions in the buffer's undo history.
2090 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2091 self.text.merge_transactions(transaction, destination);
2092 }
2093
2094 /// Waits for the buffer to receive operations with the given timestamps.
2095 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2096 &mut self,
2097 edit_ids: It,
2098 ) -> impl Future<Output = Result<()>> + use<It> {
2099 self.text.wait_for_edits(edit_ids)
2100 }
2101
2102 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2103 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2104 &mut self,
2105 anchors: It,
2106 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2107 self.text.wait_for_anchors(anchors)
2108 }
2109
2110 /// Waits for the buffer to receive operations up to the given version.
2111 pub fn wait_for_version(
2112 &mut self,
2113 version: clock::Global,
2114 ) -> impl Future<Output = Result<()>> + use<> {
2115 self.text.wait_for_version(version)
2116 }
2117
2118 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2119     /// [`Buffer::wait_for_anchors`] to resolve with an error.
2120 pub fn give_up_waiting(&mut self) {
2121 self.text.give_up_waiting();
2122 }
2123
2124 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2125 pub fn set_active_selections(
2126 &mut self,
2127 selections: Arc<[Selection<Anchor>]>,
2128 line_mode: bool,
2129 cursor_shape: CursorShape,
2130 cx: &mut Context<Self>,
2131 ) {
2132 let lamport_timestamp = self.text.lamport_clock.tick();
2133 self.remote_selections.insert(
2134 self.text.replica_id(),
2135 SelectionSet {
2136 selections: selections.clone(),
2137 lamport_timestamp,
2138 line_mode,
2139 cursor_shape,
2140 },
2141 );
2142 self.send_operation(
2143 Operation::UpdateSelections {
2144 selections,
2145 line_mode,
2146 lamport_timestamp,
2147 cursor_shape,
2148 },
2149 true,
2150 cx,
2151 );
2152 self.non_text_state_update_count += 1;
2153 cx.notify();
2154 }
2155
2156 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2157 /// this replica.
2158 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2159 if self
2160 .remote_selections
2161 .get(&self.text.replica_id())
2162 .map_or(true, |set| !set.selections.is_empty())
2163 {
2164 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2165 }
2166 }
2167
2168 pub fn set_agent_selections(
2169 &mut self,
2170 selections: Arc<[Selection<Anchor>]>,
2171 line_mode: bool,
2172 cursor_shape: CursorShape,
2173 cx: &mut Context<Self>,
2174 ) {
2175 let lamport_timestamp = self.text.lamport_clock.tick();
2176 self.remote_selections.insert(
2177 AGENT_REPLICA_ID,
2178 SelectionSet {
2179 selections: selections.clone(),
2180 lamport_timestamp,
2181 line_mode,
2182 cursor_shape,
2183 },
2184 );
2185 self.non_text_state_update_count += 1;
2186 cx.notify();
2187 }
2188
2189 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2190 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2191 }
2192
2193 /// Replaces the buffer's entire text.
2194 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2195 where
2196 T: Into<Arc<str>>,
2197 {
2198 self.autoindent_requests.clear();
2199 self.edit([(0..self.len(), text)], None, cx)
2200 }
2201
2202 /// Appends the given text to the end of the buffer.
2203 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2204 where
2205 T: Into<Arc<str>>,
2206 {
2207 self.edit([(self.len()..self.len(), text)], None, cx)
2208 }
2209
2210 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2211 /// delete, and a string of text to insert at that location.
2212 ///
2213 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2214 /// request for the edited ranges, which will be processed when the buffer finishes
2215 /// parsing.
2216 ///
2217 /// Parsing takes place at the end of a transaction, and may compute synchronously
2218 /// or asynchronously, depending on the changes.
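    ///
    /// A minimal sketch (not compiled as a doctest); the range and replacement text
    /// are illustrative:
    ///
    /// ```ignore
    /// // Replace the first three bytes and auto-indent each affected line.
    /// buffer.edit(
    ///     [(0..3, "if a {\n    b\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```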
2219 pub fn edit<I, S, T>(
2220 &mut self,
2221 edits_iter: I,
2222 autoindent_mode: Option<AutoindentMode>,
2223 cx: &mut Context<Self>,
2224 ) -> Option<clock::Lamport>
2225 where
2226 I: IntoIterator<Item = (Range<S>, T)>,
2227 S: ToOffset,
2228 T: Into<Arc<str>>,
2229 {
2230 // Skip invalid edits and coalesce contiguous ones.
2231 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2232
2233 for (range, new_text) in edits_iter {
2234 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2235
2236 if range.start > range.end {
2237 mem::swap(&mut range.start, &mut range.end);
2238 }
2239 let new_text = new_text.into();
2240 if !new_text.is_empty() || !range.is_empty() {
2241 if let Some((prev_range, prev_text)) = edits.last_mut() {
2242 if prev_range.end >= range.start {
2243 prev_range.end = cmp::max(prev_range.end, range.end);
2244 *prev_text = format!("{prev_text}{new_text}").into();
2245 } else {
2246 edits.push((range, new_text));
2247 }
2248 } else {
2249 edits.push((range, new_text));
2250 }
2251 }
2252 }
2253 if edits.is_empty() {
2254 return None;
2255 }
2256
2257 self.start_transaction();
2258 self.pending_autoindent.take();
2259 let autoindent_request = autoindent_mode
2260 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2261
2262 let edit_operation = self.text.edit(edits.iter().cloned());
2263 let edit_id = edit_operation.timestamp();
2264
2265 if let Some((before_edit, mode)) = autoindent_request {
2266 let mut delta = 0isize;
2267 let entries = edits
2268 .into_iter()
2269 .enumerate()
2270 .zip(&edit_operation.as_edit().unwrap().new_text)
2271 .map(|((ix, (range, _)), new_text)| {
2272 let new_text_length = new_text.len();
2273 let old_start = range.start.to_point(&before_edit);
2274 let new_start = (delta + range.start as isize) as usize;
2275 let range_len = range.end - range.start;
2276 delta += new_text_length as isize - range_len as isize;
2277
2278 // Decide what range of the insertion to auto-indent, and whether
2279 // the first line of the insertion should be considered a newly-inserted line
2280 // or an edit to an existing line.
2281 let mut range_of_insertion_to_indent = 0..new_text_length;
2282 let mut first_line_is_new = true;
2283
2284 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2285 let old_line_end = before_edit.line_len(old_start.row);
2286
2287 if old_start.column > old_line_start {
2288 first_line_is_new = false;
2289 }
2290
2291 if !new_text.contains('\n')
2292 && (old_start.column + (range_len as u32) < old_line_end
2293 || old_line_end == old_line_start)
2294 {
2295 first_line_is_new = false;
2296 }
2297
2298 // When inserting text starting with a newline, avoid auto-indenting the
2299 // previous line.
2300 if new_text.starts_with('\n') {
2301 range_of_insertion_to_indent.start += 1;
2302 first_line_is_new = true;
2303 }
2304
2305 let mut original_indent_column = None;
2306 if let AutoindentMode::Block {
2307 original_indent_columns,
2308 } = &mode
2309 {
2310 original_indent_column = Some(if new_text.starts_with('\n') {
2311 indent_size_for_text(
2312 new_text[range_of_insertion_to_indent.clone()].chars(),
2313 )
2314 .len
2315 } else {
2316 original_indent_columns
2317 .get(ix)
2318 .copied()
2319 .flatten()
2320 .unwrap_or_else(|| {
2321 indent_size_for_text(
2322 new_text[range_of_insertion_to_indent.clone()].chars(),
2323 )
2324 .len
2325 })
2326 });
2327
2328 // Avoid auto-indenting the line after the edit.
2329 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2330 range_of_insertion_to_indent.end -= 1;
2331 }
2332 }
2333
2334 AutoindentRequestEntry {
2335 first_line_is_new,
2336 original_indent_column,
2337 indent_size: before_edit.language_indent_size_at(range.start, cx),
2338 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2339 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2340 }
2341 })
2342 .collect();
2343
2344 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2345 before_edit,
2346 entries,
2347 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2348 ignore_empty_lines: false,
2349 }));
2350 }
2351
2352 self.end_transaction(cx);
2353 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2354 Some(edit_id)
2355 }
2356
2357 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2358 self.was_changed();
2359
2360 if self.edits_since::<usize>(old_version).next().is_none() {
2361 return;
2362 }
2363
2364 self.reparse(cx);
2365 cx.emit(BufferEvent::Edited);
2366 if was_dirty != self.is_dirty() {
2367 cx.emit(BufferEvent::DirtyChanged);
2368 }
2369 cx.notify();
2370 }
2371
2372 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2373 where
2374 I: IntoIterator<Item = Range<T>>,
2375 T: ToOffset + Copy,
2376 {
2377 let before_edit = self.snapshot();
2378 let entries = ranges
2379 .into_iter()
2380 .map(|range| AutoindentRequestEntry {
2381 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2382 first_line_is_new: true,
2383 indent_size: before_edit.language_indent_size_at(range.start, cx),
2384 original_indent_column: None,
2385 })
2386 .collect();
2387 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2388 before_edit,
2389 entries,
2390 is_block_mode: false,
2391 ignore_empty_lines: true,
2392 }));
2393 self.request_autoindent(cx);
2394 }
2395
2396     /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2397     /// You can also request the insertion of empty lines above and below the line starting at the returned point.
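    ///
    /// A minimal sketch (not compiled as a doctest); the position is illustrative:
    ///
    /// ```ignore
    /// // Create an empty line near row 2, padded with blank lines above and below.
    /// let start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
    /// ```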
2398 pub fn insert_empty_line(
2399 &mut self,
2400 position: impl ToPoint,
2401 space_above: bool,
2402 space_below: bool,
2403 cx: &mut Context<Self>,
2404 ) -> Point {
2405 let mut position = position.to_point(self);
2406
2407 self.start_transaction();
2408
2409 self.edit(
2410 [(position..position, "\n")],
2411 Some(AutoindentMode::EachLine),
2412 cx,
2413 );
2414
2415 if position.column > 0 {
2416 position += Point::new(1, 0);
2417 }
2418
2419 if !self.is_line_blank(position.row) {
2420 self.edit(
2421 [(position..position, "\n")],
2422 Some(AutoindentMode::EachLine),
2423 cx,
2424 );
2425 }
2426
2427 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2428 self.edit(
2429 [(position..position, "\n")],
2430 Some(AutoindentMode::EachLine),
2431 cx,
2432 );
2433 position.row += 1;
2434 }
2435
2436 if space_below
2437 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2438 {
2439 self.edit(
2440 [(position..position, "\n")],
2441 Some(AutoindentMode::EachLine),
2442 cx,
2443 );
2444 }
2445
2446 self.end_transaction(cx);
2447
2448 position
2449 }
2450
2451 /// Applies the given remote operations to the buffer.
2452 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2453 self.pending_autoindent.take();
2454 let was_dirty = self.is_dirty();
2455 let old_version = self.version.clone();
2456 let mut deferred_ops = Vec::new();
2457 let buffer_ops = ops
2458 .into_iter()
2459 .filter_map(|op| match op {
2460 Operation::Buffer(op) => Some(op),
2461 _ => {
2462 if self.can_apply_op(&op) {
2463 self.apply_op(op, cx);
2464 } else {
2465 deferred_ops.push(op);
2466 }
2467 None
2468 }
2469 })
2470 .collect::<Vec<_>>();
2471 for operation in buffer_ops.iter() {
2472 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2473 }
2474 self.text.apply_ops(buffer_ops);
2475 self.deferred_ops.insert(deferred_ops);
2476 self.flush_deferred_ops(cx);
2477 self.did_edit(&old_version, was_dirty, cx);
2478         // Notify regardless of whether the buffer was edited, since the operations could include a
2479         // selection update.
2480 cx.notify();
2481 }
2482
2483 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2484 let mut deferred_ops = Vec::new();
2485 for op in self.deferred_ops.drain().iter().cloned() {
2486 if self.can_apply_op(&op) {
2487 self.apply_op(op, cx);
2488 } else {
2489 deferred_ops.push(op);
2490 }
2491 }
2492 self.deferred_ops.insert(deferred_ops);
2493 }
2494
2495 pub fn has_deferred_ops(&self) -> bool {
2496 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2497 }
2498
2499 fn can_apply_op(&self, operation: &Operation) -> bool {
2500 match operation {
2501 Operation::Buffer(_) => {
2502 unreachable!("buffer operations should never be applied at this layer")
2503 }
2504 Operation::UpdateDiagnostics {
2505 diagnostics: diagnostic_set,
2506 ..
2507 } => diagnostic_set.iter().all(|diagnostic| {
2508 self.text.can_resolve(&diagnostic.range.start)
2509 && self.text.can_resolve(&diagnostic.range.end)
2510 }),
2511 Operation::UpdateSelections { selections, .. } => selections
2512 .iter()
2513 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2514 Operation::UpdateCompletionTriggers { .. } => true,
2515 }
2516 }
2517
2518 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2519 match operation {
2520 Operation::Buffer(_) => {
2521 unreachable!("buffer operations should never be applied at this layer")
2522 }
2523 Operation::UpdateDiagnostics {
2524 server_id,
2525 diagnostics: diagnostic_set,
2526 lamport_timestamp,
2527 } => {
2528 let snapshot = self.snapshot();
2529 self.apply_diagnostic_update(
2530 server_id,
2531 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2532 lamport_timestamp,
2533 cx,
2534 );
2535 }
2536 Operation::UpdateSelections {
2537 selections,
2538 lamport_timestamp,
2539 line_mode,
2540 cursor_shape,
2541 } => {
2542 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2543 if set.lamport_timestamp > lamport_timestamp {
2544 return;
2545 }
2546 }
2547
2548 self.remote_selections.insert(
2549 lamport_timestamp.replica_id,
2550 SelectionSet {
2551 selections,
2552 lamport_timestamp,
2553 line_mode,
2554 cursor_shape,
2555 },
2556 );
2557 self.text.lamport_clock.observe(lamport_timestamp);
2558 self.non_text_state_update_count += 1;
2559 }
2560 Operation::UpdateCompletionTriggers {
2561 triggers,
2562 lamport_timestamp,
2563 server_id,
2564 } => {
2565 if triggers.is_empty() {
2566 self.completion_triggers_per_language_server
2567 .remove(&server_id);
2568 self.completion_triggers = self
2569 .completion_triggers_per_language_server
2570 .values()
2571 .flat_map(|triggers| triggers.into_iter().cloned())
2572 .collect();
2573 } else {
2574 self.completion_triggers_per_language_server
2575 .insert(server_id, triggers.iter().cloned().collect());
2576 self.completion_triggers.extend(triggers);
2577 }
2578 self.text.lamport_clock.observe(lamport_timestamp);
2579 }
2580 }
2581 }
2582
2583 fn apply_diagnostic_update(
2584 &mut self,
2585 server_id: LanguageServerId,
2586 diagnostics: DiagnosticSet,
2587 lamport_timestamp: clock::Lamport,
2588 cx: &mut Context<Self>,
2589 ) {
2590 if lamport_timestamp > self.diagnostics_timestamp {
2591 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2592 if diagnostics.is_empty() {
2593 if let Ok(ix) = ix {
2594 self.diagnostics.remove(ix);
2595 }
2596 } else {
2597 match ix {
2598 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2599 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2600 };
2601 }
2602 self.diagnostics_timestamp = lamport_timestamp;
2603 self.non_text_state_update_count += 1;
2604 self.text.lamport_clock.observe(lamport_timestamp);
2605 cx.notify();
2606 cx.emit(BufferEvent::DiagnosticsUpdated);
2607 }
2608 }
2609
2610 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2611 self.was_changed();
2612 cx.emit(BufferEvent::Operation {
2613 operation,
2614 is_local,
2615 });
2616 }
2617
2618 /// Removes the selections for a given peer.
2619 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2620 self.remote_selections.remove(&replica_id);
2621 cx.notify();
2622 }
2623
2624 /// Undoes the most recent transaction.
2625 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2626 let was_dirty = self.is_dirty();
2627 let old_version = self.version.clone();
2628
2629 if let Some((transaction_id, operation)) = self.text.undo() {
2630 self.send_operation(Operation::Buffer(operation), true, cx);
2631 self.did_edit(&old_version, was_dirty, cx);
2632 Some(transaction_id)
2633 } else {
2634 None
2635 }
2636 }
2637
2638 /// Manually undoes a specific transaction in the buffer's undo history.
2639 pub fn undo_transaction(
2640 &mut self,
2641 transaction_id: TransactionId,
2642 cx: &mut Context<Self>,
2643 ) -> bool {
2644 let was_dirty = self.is_dirty();
2645 let old_version = self.version.clone();
2646 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2647 self.send_operation(Operation::Buffer(operation), true, cx);
2648 self.did_edit(&old_version, was_dirty, cx);
2649 true
2650 } else {
2651 false
2652 }
2653 }
2654
2655 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2656 pub fn undo_to_transaction(
2657 &mut self,
2658 transaction_id: TransactionId,
2659 cx: &mut Context<Self>,
2660 ) -> bool {
2661 let was_dirty = self.is_dirty();
2662 let old_version = self.version.clone();
2663
2664 let operations = self.text.undo_to_transaction(transaction_id);
2665 let undone = !operations.is_empty();
2666 for operation in operations {
2667 self.send_operation(Operation::Buffer(operation), true, cx);
2668 }
2669 if undone {
2670 self.did_edit(&old_version, was_dirty, cx)
2671 }
2672 undone
2673 }
2674
2675 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2676 let was_dirty = self.is_dirty();
2677 let operation = self.text.undo_operations(counts);
2678 let old_version = self.version.clone();
2679 self.send_operation(Operation::Buffer(operation), true, cx);
2680 self.did_edit(&old_version, was_dirty, cx);
2681 }
2682
2683     /// Redoes the most recently undone transaction.
2684 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2685 let was_dirty = self.is_dirty();
2686 let old_version = self.version.clone();
2687
2688 if let Some((transaction_id, operation)) = self.text.redo() {
2689 self.send_operation(Operation::Buffer(operation), true, cx);
2690 self.did_edit(&old_version, was_dirty, cx);
2691 Some(transaction_id)
2692 } else {
2693 None
2694 }
2695 }
2696
2697     /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2698 pub fn redo_to_transaction(
2699 &mut self,
2700 transaction_id: TransactionId,
2701 cx: &mut Context<Self>,
2702 ) -> bool {
2703 let was_dirty = self.is_dirty();
2704 let old_version = self.version.clone();
2705
2706 let operations = self.text.redo_to_transaction(transaction_id);
2707 let redone = !operations.is_empty();
2708 for operation in operations {
2709 self.send_operation(Operation::Buffer(operation), true, cx);
2710 }
2711 if redone {
2712 self.did_edit(&old_version, was_dirty, cx)
2713 }
2714 redone
2715 }
2716
2717 /// Override current completion triggers with the user-provided completion triggers.
2718 pub fn set_completion_triggers(
2719 &mut self,
2720 server_id: LanguageServerId,
2721 triggers: BTreeSet<String>,
2722 cx: &mut Context<Self>,
2723 ) {
2724 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2725 if triggers.is_empty() {
2726 self.completion_triggers_per_language_server
2727 .remove(&server_id);
2728 self.completion_triggers = self
2729 .completion_triggers_per_language_server
2730 .values()
2731 .flat_map(|triggers| triggers.into_iter().cloned())
2732 .collect();
2733 } else {
2734 self.completion_triggers_per_language_server
2735 .insert(server_id, triggers.clone());
2736 self.completion_triggers.extend(triggers.iter().cloned());
2737 }
2738 self.send_operation(
2739 Operation::UpdateCompletionTriggers {
2740 triggers: triggers.into_iter().collect(),
2741 lamport_timestamp: self.completion_triggers_timestamp,
2742 server_id,
2743 },
2744 true,
2745 cx,
2746 );
2747 cx.notify();
2748 }
2749
2750 /// Returns a list of strings which trigger a completion menu for this language.
2751 /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
2752 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2753 &self.completion_triggers
2754 }
2755
2756 /// Call this directly after performing edits to prevent the preview tab
2757 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2758 /// to return false until there are additional edits.
2759 pub fn refresh_preview(&mut self) {
2760 self.preview_version = self.version.clone();
2761 }
2762
2763 /// Whether we should preserve the preview status of a tab containing this buffer.
2764 pub fn preserve_preview(&self) -> bool {
2765 !self.has_edits_since(&self.preview_version)
2766 }
2767
2768 pub fn result_id(&self) -> Option<String> {
2769 self.pull_diagnostics_result_id.clone()
2770 }
2771
2772 pub fn set_result_id(&mut self, result_id: Option<String>) {
2773 self.pull_diagnostics_result_id = result_id;
2774 }
2775}
2776
2777#[doc(hidden)]
2778#[cfg(any(test, feature = "test-support"))]
2779impl Buffer {
2780 pub fn edit_via_marked_text(
2781 &mut self,
2782 marked_string: &str,
2783 autoindent_mode: Option<AutoindentMode>,
2784 cx: &mut Context<Self>,
2785 ) {
2786 let edits = self.edits_for_marked_text(marked_string);
2787 self.edit(edits, autoindent_mode, cx);
2788 }
2789
2790 pub fn set_group_interval(&mut self, group_interval: Duration) {
2791 self.text.set_group_interval(group_interval);
2792 }
2793
2794 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2795 where
2796 T: rand::Rng,
2797 {
2798 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2799 let mut last_end = None;
2800 for _ in 0..old_range_count {
2801 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2802 break;
2803 }
2804
2805 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2806 let mut range = self.random_byte_range(new_start, rng);
2807 if rng.gen_bool(0.2) {
2808 mem::swap(&mut range.start, &mut range.end);
2809 }
2810 last_end = Some(range.end);
2811
2812 let new_text_len = rng.gen_range(0..10);
2813 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2814 new_text = new_text.to_uppercase();
2815
2816 edits.push((range, new_text));
2817 }
2818 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2819 self.edit(edits, None, cx);
2820 }
2821
2822 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2823 let was_dirty = self.is_dirty();
2824 let old_version = self.version.clone();
2825
2826 let ops = self.text.randomly_undo_redo(rng);
2827 if !ops.is_empty() {
2828 for op in ops {
2829 self.send_operation(Operation::Buffer(op), true, cx);
2830 self.did_edit(&old_version, was_dirty, cx);
2831 }
2832 }
2833 }
2834}
2835
2836impl EventEmitter<BufferEvent> for Buffer {}
2837
2838impl Deref for Buffer {
2839 type Target = TextBuffer;
2840
2841 fn deref(&self) -> &Self::Target {
2842 &self.text
2843 }
2844}
2845
2846impl BufferSnapshot {
2847 /// Returns [`IndentSize`] for a given line that respects user settings and
2848 /// language preferences.
2849 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2850 indent_size_for_line(self, row)
2851 }
2852
2853 /// Returns [`IndentSize`] for a given position that respects user settings
2854 /// and language preferences.
2855 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2856 let settings = language_settings(
2857 self.language_at(position).map(|l| l.name()),
2858 self.file(),
2859 cx,
2860 );
2861 if settings.hard_tabs {
2862 IndentSize::tab()
2863 } else {
2864 IndentSize::spaces(settings.tab_size.get())
2865 }
2866 }
2867
2868 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2869 /// is passed in as `single_indent_size`.
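    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Suggest indents for the first five rows, using a 4-space indent unit.
    /// let indents = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, size) in indents {
    ///     println!("row {row}: {} columns", size.len);
    /// }
    /// ```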
2870 pub fn suggested_indents(
2871 &self,
2872 rows: impl Iterator<Item = u32>,
2873 single_indent_size: IndentSize,
2874 ) -> BTreeMap<u32, IndentSize> {
2875 let mut result = BTreeMap::new();
2876
2877 for row_range in contiguous_ranges(rows, 10) {
2878 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2879 Some(suggestions) => suggestions,
2880 _ => break,
2881 };
2882
2883 for (row, suggestion) in row_range.zip(suggestions) {
2884 let indent_size = if let Some(suggestion) = suggestion {
2885 result
2886 .get(&suggestion.basis_row)
2887 .copied()
2888 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2889 .with_delta(suggestion.delta, single_indent_size)
2890 } else {
2891 self.indent_size_for_line(row)
2892 };
2893
2894 result.insert(row, indent_size);
2895 }
2896 }
2897
2898 result
2899 }
2900
2901 fn suggest_autoindents(
2902 &self,
2903 row_range: Range<u32>,
2904 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2905 let config = &self.language.as_ref()?.config;
2906 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2907 let significant_indentation = config.significant_indentation;
2908
2909 // Find the suggested indentation ranges based on the syntax tree.
2910 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2911 let end = Point::new(row_range.end, 0);
2912 let range = (start..end).to_offset(&self.text);
2913 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2914 Some(&grammar.indents_config.as_ref()?.query)
2915 });
2916 let indent_configs = matches
2917 .grammars()
2918 .iter()
2919 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2920 .collect::<Vec<_>>();
2921
2922 let mut indent_ranges = Vec::<Range<Point>>::new();
2923 let mut outdent_positions = Vec::<Point>::new();
2924 while let Some(mat) = matches.peek() {
2925 let mut start: Option<Point> = None;
2926 let mut end: Option<Point> = None;
2927 let mut outdent: Option<Point> = None;
2928
2929 let config = &indent_configs[mat.grammar_index];
2930 for capture in mat.captures {
2931 if capture.index == config.indent_capture_ix {
2932 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2933 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2934 } else if Some(capture.index) == config.start_capture_ix {
2935 start = Some(Point::from_ts_point(capture.node.end_position()));
2936 } else if Some(capture.index) == config.end_capture_ix {
2937 end = Some(Point::from_ts_point(capture.node.start_position()));
2938 } else if Some(capture.index) == config.outdent_capture_ix {
2939 let point = Point::from_ts_point(capture.node.start_position());
2940 outdent.get_or_insert(point);
2941 outdent_positions.push(point);
2942 }
2943 }
2944
2945 matches.advance();
2946             // In the case of significant indentation, expand the end to the outdent position.
2947 let end = if significant_indentation {
2948 outdent.or(end)
2949 } else {
2950 end
2951 };
2952 if let Some((start, end)) = start.zip(end) {
2953 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2954 continue;
2955 }
2956 let range = start..end;
2957 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2958 Err(ix) => indent_ranges.insert(ix, range),
2959 Ok(ix) => {
2960 let prev_range = &mut indent_ranges[ix];
2961 prev_range.end = prev_range.end.max(range.end);
2962 }
2963 }
2964 }
2965 }
2966
2967 let mut error_ranges = Vec::<Range<Point>>::new();
2968 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2969 grammar.error_query.as_ref()
2970 });
2971 while let Some(mat) = matches.peek() {
2972 let node = mat.captures[0].node;
2973 let start = Point::from_ts_point(node.start_position());
2974 let end = Point::from_ts_point(node.end_position());
2975 let range = start..end;
2976 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2977 Ok(ix) | Err(ix) => ix,
2978 };
2979 let mut end_ix = ix;
2980 while let Some(existing_range) = error_ranges.get(end_ix) {
2981 if existing_range.end < end {
2982 end_ix += 1;
2983 } else {
2984 break;
2985 }
2986 }
2987 error_ranges.splice(ix..end_ix, [range]);
2988 matches.advance();
2989 }
2990
2991         // We don't use outdent positions to truncate in the case of significant indentation;
2992         // rather, we use them to expand the ranges (handled above).
2993 if !significant_indentation {
2994 outdent_positions.sort();
2995 for outdent_position in outdent_positions {
2996                 // Find the innermost indent range containing this outdent_position and
2997                 // set its end to the outdent position.
2998 if let Some(range_to_truncate) = indent_ranges
2999 .iter_mut()
3000 .filter(|indent_range| indent_range.contains(&outdent_position))
3001 .next_back()
3002 {
3003 range_to_truncate.end = outdent_position;
3004 }
3005 }
3006 }
3007
3008         // Find the suggested indentation increases and decreases based on regexes.
3009 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3010 self.for_each_line(
3011 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3012 ..Point::new(row_range.end, 0),
3013 |row, line| {
3014 if config
3015 .decrease_indent_pattern
3016 .as_ref()
3017 .map_or(false, |regex| regex.is_match(line))
3018 {
3019 indent_change_rows.push((row, Ordering::Less));
3020 }
3021 if config
3022 .increase_indent_pattern
3023 .as_ref()
3024 .map_or(false, |regex| regex.is_match(line))
3025 {
3026 indent_change_rows.push((row + 1, Ordering::Greater));
3027 }
3028 },
3029 );
3030
3031 let mut indent_changes = indent_change_rows.into_iter().peekable();
3032 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3033 prev_non_blank_row.unwrap_or(0)
3034 } else {
3035 row_range.start.saturating_sub(1)
3036 };
3037 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3038 Some(row_range.map(move |row| {
3039 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3040
3041 let mut indent_from_prev_row = false;
3042 let mut outdent_from_prev_row = false;
3043 let mut outdent_to_row = u32::MAX;
3044 let mut from_regex = false;
3045
3046 while let Some((indent_row, delta)) = indent_changes.peek() {
3047 match indent_row.cmp(&row) {
3048 Ordering::Equal => match delta {
3049 Ordering::Less => {
3050 from_regex = true;
3051 outdent_from_prev_row = true
3052 }
3053 Ordering::Greater => {
3054 indent_from_prev_row = true;
3055 from_regex = true
3056 }
3057 _ => {}
3058 },
3059
3060 Ordering::Greater => break,
3061 Ordering::Less => {}
3062 }
3063
3064 indent_changes.next();
3065 }
3066
3067 for range in &indent_ranges {
3068 if range.start.row >= row {
3069 break;
3070 }
3071 if range.start.row == prev_row && range.end > row_start {
3072 indent_from_prev_row = true;
3073 }
3074 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3075 {
3076 indent_from_prev_row = true;
3077 }
3078 if !significant_indentation || !self.is_line_blank(row) {
3079 if range.end > prev_row_start && range.end <= row_start {
3080 outdent_to_row = outdent_to_row.min(range.start.row);
3081 }
3082 }
3083 }
3084
3085 let within_error = error_ranges
3086 .iter()
3087 .any(|e| e.start.row < row && e.end > row_start);
3088
3089 let suggestion = if outdent_to_row == prev_row
3090 || (outdent_from_prev_row && indent_from_prev_row)
3091 {
3092 Some(IndentSuggestion {
3093 basis_row: prev_row,
3094 delta: Ordering::Equal,
3095 within_error: within_error && !from_regex,
3096 })
3097 } else if indent_from_prev_row {
3098 Some(IndentSuggestion {
3099 basis_row: prev_row,
3100 delta: Ordering::Greater,
3101 within_error: within_error && !from_regex,
3102 })
3103 } else if outdent_to_row < prev_row {
3104 Some(IndentSuggestion {
3105 basis_row: outdent_to_row,
3106 delta: Ordering::Equal,
3107 within_error: within_error && !from_regex,
3108 })
3109 } else if outdent_from_prev_row {
3110 Some(IndentSuggestion {
3111 basis_row: prev_row,
3112 delta: Ordering::Less,
3113 within_error: within_error && !from_regex,
3114 })
3115 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3116 {
3117 Some(IndentSuggestion {
3118 basis_row: prev_row,
3119 delta: Ordering::Equal,
3120 within_error: within_error && !from_regex,
3121 })
3122 } else {
3123 None
3124 };
3125
3126 prev_row = row;
3127 prev_row_start = row_start;
3128 suggestion
3129 }))
3130 }
3131
3132 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3133 while row > 0 {
3134 row -= 1;
3135 if !self.is_line_blank(row) {
3136 return Some(row);
3137 }
3138 }
3139 None
3140 }
3141
3142 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3143 let captures = self.syntax.captures(range, &self.text, |grammar| {
3144 grammar.highlights_query.as_ref()
3145 });
3146 let highlight_maps = captures
3147 .grammars()
3148 .iter()
3149 .map(|grammar| grammar.highlight_map())
3150 .collect();
3151 (captures, highlight_maps)
3152 }
3153
3154 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3155 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3156 /// returned in chunks where each chunk has a single syntax highlighting style and
3157 /// diagnostic status.
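    ///
    /// A minimal sketch (not compiled as a doctest); it assumes the `text` field on
    /// the yielded chunk type:
    ///
    /// ```ignore
    /// let mut plain_text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk has a single highlight style and diagnostic status.
    ///     plain_text.push_str(chunk.text);
    /// }
    /// ```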
3158 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3159 let range = range.start.to_offset(self)..range.end.to_offset(self);
3160
3161 let mut syntax = None;
3162 if language_aware {
3163 syntax = Some(self.get_highlights(range.clone()));
3164 }
3165 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3166 let diagnostics = language_aware;
3167 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3168 }
3169
3170 pub fn highlighted_text_for_range<T: ToOffset>(
3171 &self,
3172 range: Range<T>,
3173 override_style: Option<HighlightStyle>,
3174 syntax_theme: &SyntaxTheme,
3175 ) -> HighlightedText {
3176 HighlightedText::from_buffer_range(
3177 range,
3178 &self.text,
3179 &self.syntax,
3180 override_style,
3181 syntax_theme,
3182 )
3183 }
3184
3185 /// Invokes the given callback for each line of text in the given range of the buffer.
3186 /// Uses callback to avoid allocating a string for each line.
3187 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3188 let mut line = String::new();
3189 let mut row = range.start.row;
3190 for chunk in self
3191 .as_rope()
3192 .chunks_in_range(range.to_offset(self))
3193 .chain(["\n"])
3194 {
3195 for (newline_ix, text) in chunk.split('\n').enumerate() {
3196 if newline_ix > 0 {
3197 callback(row, &line);
3198 row += 1;
3199 line.clear();
3200 }
3201 line.push_str(text);
3202 }
3203 }
3204 }
3205
3206 /// Iterates over every [`SyntaxLayer`] in the buffer.
3207 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3208 self.syntax
3209 .layers_for_range(0..self.len(), &self.text, true)
3210 }
3211
3212 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3213 let offset = position.to_offset(self);
3214 self.syntax
3215 .layers_for_range(offset..offset, &self.text, false)
3216 .filter(|l| l.node().end_byte() > offset)
3217 .last()
3218 }
3219
3220 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3221 &self,
3222 range: Range<D>,
3223 ) -> Option<SyntaxLayer> {
3224 let range = range.to_offset(self);
3225 return self
3226 .syntax
3227 .layers_for_range(range, &self.text, false)
3228 .max_by(|a, b| {
3229 if a.depth != b.depth {
3230 a.depth.cmp(&b.depth)
3231 } else if a.offset.0 != b.offset.0 {
3232 a.offset.0.cmp(&b.offset.0)
3233 } else {
3234 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3235 }
3236 });
3237 }
3238
3239 /// Returns the main [`Language`].
3240 pub fn language(&self) -> Option<&Arc<Language>> {
3241 self.language.as_ref()
3242 }
3243
3244 /// Returns the [`Language`] at the given location.
3245 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3246 self.syntax_layer_at(position)
3247 .map(|info| info.language)
3248 .or(self.language.as_ref())
3249 }
3250
3251 /// Returns the settings for the language at the given location.
3252 pub fn settings_at<'a, D: ToOffset>(
3253 &'a self,
3254 position: D,
3255 cx: &'a App,
3256 ) -> Cow<'a, LanguageSettings> {
3257 language_settings(
3258 self.language_at(position).map(|l| l.name()),
3259 self.file.as_ref(),
3260 cx,
3261 )
3262 }
3263
3264 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3265 CharClassifier::new(self.language_scope_at(point))
3266 }
3267
3268 /// Returns the [`LanguageScope`] at the given location.
3269 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3270 let offset = position.to_offset(self);
3271 let mut scope = None;
3272 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3273
3274 // Use the layer that has the smallest node intersecting the given point.
3275 for layer in self
3276 .syntax
3277 .layers_for_range(offset..offset, &self.text, false)
3278 {
3279 let mut cursor = layer.node().walk();
3280
3281 let mut range = None;
3282 loop {
3283 let child_range = cursor.node().byte_range();
3284 if !child_range.contains(&offset) {
3285 break;
3286 }
3287
3288 range = Some(child_range);
3289 if cursor.goto_first_child_for_byte(offset).is_none() {
3290 break;
3291 }
3292 }
3293
3294 if let Some(range) = range {
3295 if smallest_range_and_depth.as_ref().map_or(
3296 true,
3297 |(smallest_range, smallest_range_depth)| {
3298 if layer.depth > *smallest_range_depth {
3299 true
3300 } else if layer.depth == *smallest_range_depth {
3301 range.len() < smallest_range.len()
3302 } else {
3303 false
3304 }
3305 },
3306 ) {
3307 smallest_range_and_depth = Some((range, layer.depth));
3308 scope = Some(LanguageScope {
3309 language: layer.language.clone(),
3310 override_id: layer.override_id(offset, &self.text),
3311 });
3312 }
3313 }
3314 }
3315
3316 scope.or_else(|| {
3317 self.language.clone().map(|language| LanguageScope {
3318 language,
3319 override_id: None,
3320 })
3321 })
3322 }
3323
3324 /// Returns a tuple of the range and character kind of the word
3325 /// surrounding the given position.
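    ///
    /// A minimal sketch (not compiled as a doctest); `text_for_range` comes from the
    /// underlying text snapshot, and the offset is illustrative:
    ///
    /// ```ignore
    /// let (range, _kind) = snapshot.surrounding_word(10);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```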
3326 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3327 let mut start = start.to_offset(self);
3328 let mut end = start;
3329 let mut next_chars = self.chars_at(start).take(128).peekable();
3330 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3331
3332 let classifier = self.char_classifier_at(start);
3333 let word_kind = cmp::max(
3334 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3335 next_chars.peek().copied().map(|c| classifier.kind(c)),
3336 );
3337
3338 for ch in prev_chars {
3339 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3340 start -= ch.len_utf8();
3341 } else {
3342 break;
3343 }
3344 }
3345
3346 for ch in next_chars {
3347 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3348 end += ch.len_utf8();
3349 } else {
3350 break;
3351 }
3352 }
3353
3354 (start..end, word_kind)
3355 }
3356
3357 /// Returns the closest syntax node enclosing the given range.
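    ///
    /// A minimal sketch (not compiled as a doctest), e.g. for growing a selection to
    /// the enclosing syntax node; `selection` is a hypothetical `Range<usize>`:
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_ancestor(selection.clone()) {
    ///     let expanded = node.byte_range();
    /// }
    /// ```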
3358 pub fn syntax_ancestor<'a, T: ToOffset>(
3359 &'a self,
3360 range: Range<T>,
3361 ) -> Option<tree_sitter::Node<'a>> {
3362 let range = range.start.to_offset(self)..range.end.to_offset(self);
3363 let mut result: Option<tree_sitter::Node<'a>> = None;
3364 'outer: for layer in self
3365 .syntax
3366 .layers_for_range(range.clone(), &self.text, true)
3367 {
3368 let mut cursor = layer.node().walk();
3369
3370 // Descend to the first leaf that touches the start of the range.
3371 //
3372 // If the range is non-empty and the current node ends exactly at the start,
3373 // move to the next sibling to find a node that extends beyond the start.
3374 //
3375 // If the range is empty and the current node starts after the range position,
3376 // move to the previous sibling to find the node that contains the position.
3377 while cursor.goto_first_child_for_byte(range.start).is_some() {
3378 if !range.is_empty() && cursor.node().end_byte() == range.start {
3379 cursor.goto_next_sibling();
3380 }
3381 if range.is_empty() && cursor.node().start_byte() > range.start {
3382 cursor.goto_previous_sibling();
3383 }
3384 }
3385
3386 // Ascend to the smallest ancestor that strictly contains the range.
3387 loop {
3388 let node_range = cursor.node().byte_range();
3389 if node_range.start <= range.start
3390 && node_range.end >= range.end
3391 && node_range.len() > range.len()
3392 {
3393 break;
3394 }
3395 if !cursor.goto_parent() {
3396 continue 'outer;
3397 }
3398 }
3399
3400 let left_node = cursor.node();
3401 let mut layer_result = left_node;
3402
3403 // For an empty range, try to find another node immediately to the right of the range.
3404 if left_node.end_byte() == range.start {
3405 let mut right_node = None;
3406 while !cursor.goto_next_sibling() {
3407 if !cursor.goto_parent() {
3408 break;
3409 }
3410 }
3411
3412 while cursor.node().start_byte() == range.start {
3413 right_node = Some(cursor.node());
3414 if !cursor.goto_first_child() {
3415 break;
3416 }
3417 }
3418
3419 // If there is a candidate node on both sides of the (empty) range, then
3420 // decide between the two by favoring a named node over an anonymous token.
3421 // If both nodes are the same in that regard, favor the right one.
3422 if let Some(right_node) = right_node {
3423 if right_node.is_named() || !left_node.is_named() {
3424 layer_result = right_node;
3425 }
3426 }
3427 }
3428
3429 if let Some(previous_result) = &result {
3430 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3431 continue;
3432 }
3433 }
3434 result = Some(layer_result);
3435 }
3436
3437 result
3438 }
3439
3440 /// Returns the root syntax node within the given row
3441 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3442 let start_offset = position.to_offset(self);
3443
3444 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3445
3446 let layer = self
3447 .syntax
3448 .layers_for_range(start_offset..start_offset, &self.text, true)
3449 .next()?;
3450
3451 let mut cursor = layer.node().walk();
3452
3453 // Descend to the first leaf that touches the start of the range.
3454 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3455 if cursor.node().end_byte() == start_offset {
3456 cursor.goto_next_sibling();
3457 }
3458 }
3459
3460 // Ascend to the root node within the same row.
3461 while cursor.goto_parent() {
3462 if cursor.node().start_position().row != row {
3463 break;
3464 }
3465 }
3466
3467 return Some(cursor.node());
3468 }
3469
3470 /// Returns the outline for the buffer.
3471 ///
3472 /// This method allows passing an optional [`SyntaxTheme`] to
3473 /// syntax-highlight the returned symbols.
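    ///
    /// A minimal sketch (not compiled as a doctest); it assumes the `items` field on
    /// [`Outline`] and the `depth`/`text` fields on its items:
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```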
3474 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3475 self.outline_items_containing(0..self.len(), true, theme)
3476 .map(Outline::new)
3477 }
3478
3479 /// Returns all the symbols that contain the given position.
3480 ///
3481 /// This method allows passing an optional [`SyntaxTheme`] to
3482 /// syntax-highlight the returned symbols.
3483 pub fn symbols_containing<T: ToOffset>(
3484 &self,
3485 position: T,
3486 theme: Option<&SyntaxTheme>,
3487 ) -> Option<Vec<OutlineItem<Anchor>>> {
3488 let position = position.to_offset(self);
3489 let mut items = self.outline_items_containing(
3490 position.saturating_sub(1)..self.len().min(position + 1),
3491 false,
3492 theme,
3493 )?;
3494 let mut prev_depth = None;
3495 items.retain(|item| {
3496 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3497 prev_depth = Some(item.depth);
3498 result
3499 });
3500 Some(items)
3501 }
3502
3503 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3504 let range = range.to_offset(self);
3505 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3506 grammar.outline_config.as_ref().map(|c| &c.query)
3507 });
3508 let configs = matches
3509 .grammars()
3510 .iter()
3511 .map(|g| g.outline_config.as_ref().unwrap())
3512 .collect::<Vec<_>>();
3513
3514 while let Some(mat) = matches.peek() {
3515 let config = &configs[mat.grammar_index];
3516 let containing_item_node = maybe!({
3517 let item_node = mat.captures.iter().find_map(|cap| {
3518 if cap.index == config.item_capture_ix {
3519 Some(cap.node)
3520 } else {
3521 None
3522 }
3523 })?;
3524
3525 let item_byte_range = item_node.byte_range();
3526 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3527 None
3528 } else {
3529 Some(item_node)
3530 }
3531 });
3532
3533 if let Some(item_node) = containing_item_node {
3534 return Some(
3535 Point::from_ts_point(item_node.start_position())
3536 ..Point::from_ts_point(item_node.end_position()),
3537 );
3538 }
3539
3540 matches.advance();
3541 }
3542 None
3543 }
3544
3545 pub fn outline_items_containing<T: ToOffset>(
3546 &self,
3547 range: Range<T>,
3548 include_extra_context: bool,
3549 theme: Option<&SyntaxTheme>,
3550 ) -> Option<Vec<OutlineItem<Anchor>>> {
3551 let range = range.to_offset(self);
3552 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3553 grammar.outline_config.as_ref().map(|c| &c.query)
3554 });
3555 let configs = matches
3556 .grammars()
3557 .iter()
3558 .map(|g| g.outline_config.as_ref().unwrap())
3559 .collect::<Vec<_>>();
3560
3561 let mut items = Vec::new();
3562 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3563 while let Some(mat) = matches.peek() {
3564 let config = &configs[mat.grammar_index];
3565 if let Some(item) =
3566 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3567 {
3568 items.push(item);
3569 } else if let Some(capture) = mat
3570 .captures
3571 .iter()
3572 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3573 {
3574 let capture_range = capture.node.start_position()..capture.node.end_position();
3575 let mut capture_row_range =
3576 capture_range.start.row as u32..capture_range.end.row as u32;
3577 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3578 {
3579 capture_row_range.end -= 1;
3580 }
3581 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3582 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3583 last_row_range.end = capture_row_range.end;
3584 } else {
3585 annotation_row_ranges.push(capture_row_range);
3586 }
3587 } else {
3588 annotation_row_ranges.push(capture_row_range);
3589 }
3590 }
3591 matches.advance();
3592 }
3593
3594 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3595
3596 // Assign depths based on containment relationships and convert to anchors.
3597 let mut item_ends_stack = Vec::<Point>::new();
3598 let mut anchor_items = Vec::new();
3599 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3600 for item in items {
3601 while let Some(last_end) = item_ends_stack.last().copied() {
3602 if last_end < item.range.end {
3603 item_ends_stack.pop();
3604 } else {
3605 break;
3606 }
3607 }
3608
3609 let mut annotation_row_range = None;
3610 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3611 let row_preceding_item = item.range.start.row.saturating_sub(1);
3612 if next_annotation_row_range.end < row_preceding_item {
3613 annotation_row_ranges.next();
3614 } else {
3615 if next_annotation_row_range.end == row_preceding_item {
3616 annotation_row_range = Some(next_annotation_row_range.clone());
3617 annotation_row_ranges.next();
3618 }
3619 break;
3620 }
3621 }
3622
3623 anchor_items.push(OutlineItem {
3624 depth: item_ends_stack.len(),
3625 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3626 text: item.text,
3627 highlight_ranges: item.highlight_ranges,
3628 name_ranges: item.name_ranges,
3629 body_range: item.body_range.map(|body_range| {
3630 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3631 }),
3632 annotation_range: annotation_row_range.map(|annotation_range| {
3633 self.anchor_after(Point::new(annotation_range.start, 0))
3634 ..self.anchor_before(Point::new(
3635 annotation_range.end,
3636 self.line_len(annotation_range.end),
3637 ))
3638 }),
3639 });
3640 item_ends_stack.push(item.range.end);
3641 }
3642
3643 Some(anchor_items)
3644 }
3645
3646 fn next_outline_item(
3647 &self,
3648 config: &OutlineConfig,
3649 mat: &SyntaxMapMatch,
3650 range: &Range<usize>,
3651 include_extra_context: bool,
3652 theme: Option<&SyntaxTheme>,
3653 ) -> Option<OutlineItem<Point>> {
3654 let item_node = mat.captures.iter().find_map(|cap| {
3655 if cap.index == config.item_capture_ix {
3656 Some(cap.node)
3657 } else {
3658 None
3659 }
3660 })?;
3661
3662 let item_byte_range = item_node.byte_range();
3663 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3664 return None;
3665 }
3666 let item_point_range = Point::from_ts_point(item_node.start_position())
3667 ..Point::from_ts_point(item_node.end_position());
3668
3669 let mut open_point = None;
3670 let mut close_point = None;
3671 let mut buffer_ranges = Vec::new();
3672 for capture in mat.captures {
3673 let node_is_name;
3674 if capture.index == config.name_capture_ix {
3675 node_is_name = true;
3676 } else if Some(capture.index) == config.context_capture_ix
3677 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3678 {
3679 node_is_name = false;
3680 } else {
3681 if Some(capture.index) == config.open_capture_ix {
3682 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3683 } else if Some(capture.index) == config.close_capture_ix {
3684 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3685 }
3686
3687 continue;
3688 }
3689
3690 let mut range = capture.node.start_byte()..capture.node.end_byte();
3691 let start = capture.node.start_position();
3692 if capture.node.end_position().row > start.row {
3693 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3694 }
3695
3696 if !range.is_empty() {
3697 buffer_ranges.push((range, node_is_name));
3698 }
3699 }
3700 if buffer_ranges.is_empty() {
3701 return None;
3702 }
3703 let mut text = String::new();
3704 let mut highlight_ranges = Vec::new();
3705 let mut name_ranges = Vec::new();
3706 let mut chunks = self.chunks(
3707 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3708 true,
3709 );
3710 let mut last_buffer_range_end = 0;
3711
3712 for (buffer_range, is_name) in buffer_ranges {
3713 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3714 if space_added {
3715 text.push(' ');
3716 }
3717 let before_append_len = text.len();
3718 let mut offset = buffer_range.start;
3719 chunks.seek(buffer_range.clone());
3720 for mut chunk in chunks.by_ref() {
3721 if chunk.text.len() > buffer_range.end - offset {
3722 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3723 offset = buffer_range.end;
3724 } else {
3725 offset += chunk.text.len();
3726 }
3727 let style = chunk
3728 .syntax_highlight_id
3729 .zip(theme)
3730 .and_then(|(highlight, theme)| highlight.style(theme));
3731 if let Some(style) = style {
3732 let start = text.len();
3733 let end = start + chunk.text.len();
3734 highlight_ranges.push((start..end, style));
3735 }
3736 text.push_str(chunk.text);
3737 if offset >= buffer_range.end {
3738 break;
3739 }
3740 }
3741 if is_name {
3742 let after_append_len = text.len();
3743 let start = if space_added && !name_ranges.is_empty() {
3744 before_append_len - 1
3745 } else {
3746 before_append_len
3747 };
3748 name_ranges.push(start..after_append_len);
3749 }
3750 last_buffer_range_end = buffer_range.end;
3751 }
3752
3753 Some(OutlineItem {
3754 depth: 0, // We'll calculate the depth later
3755 range: item_point_range,
3756 text,
3757 highlight_ranges,
3758 name_ranges,
3759 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3760 annotation_range: None,
3761 })
3762 }
3763
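    /// Returns the ranges of function bodies within `within`, suitable for folding.
    ///
    /// A minimal usage sketch (not from the original source), assuming `snapshot` is a
    /// `BufferSnapshot` whose language defines a text-object query for function bodies:
    ///
    /// ```ignore
    /// let fold_ranges: Vec<Range<usize>> =
    ///     snapshot.function_body_fold_ranges(0..snapshot.len()).collect();
    /// ```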
3764 pub fn function_body_fold_ranges<T: ToOffset>(
3765 &self,
3766 within: Range<T>,
3767 ) -> impl Iterator<Item = Range<usize>> + '_ {
3768 self.text_object_ranges(within, TreeSitterOptions::default())
3769 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3770 }
3771
    /// For each grammar in the language, runs the [`tree_sitter::Query`]
    /// returned by the provided callback against the given range.
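    ///
    /// A minimal usage sketch (not from the original source), mirroring how other
    /// methods in this file consume [`SyntaxMapMatches`]:
    ///
    /// ```ignore
    /// // Run each grammar's outline query over the whole buffer.
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     println!("grammar {} matched pattern {}", mat.grammar_index, mat.pattern_index);
    ///     matches.advance();
    /// }
    /// ```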
3774 pub fn matches(
3775 &self,
3776 range: Range<usize>,
3777 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3778 ) -> SyntaxMapMatches {
3779 self.syntax.matches(range, self, query)
3780 }
3781
3782 pub fn all_bracket_ranges(
3783 &self,
3784 range: Range<usize>,
3785 ) -> impl Iterator<Item = BracketMatch> + '_ {
3786 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3787 grammar.brackets_config.as_ref().map(|c| &c.query)
3788 });
3789 let configs = matches
3790 .grammars()
3791 .iter()
3792 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3793 .collect::<Vec<_>>();
3794
3795 iter::from_fn(move || {
3796 while let Some(mat) = matches.peek() {
3797 let mut open = None;
3798 let mut close = None;
3799 let config = &configs[mat.grammar_index];
3800 let pattern = &config.patterns[mat.pattern_index];
3801 for capture in mat.captures {
3802 if capture.index == config.open_capture_ix {
3803 open = Some(capture.node.byte_range());
3804 } else if capture.index == config.close_capture_ix {
3805 close = Some(capture.node.byte_range());
3806 }
3807 }
3808
3809 matches.advance();
3810
3811 let Some((open_range, close_range)) = open.zip(close) else {
3812 continue;
3813 };
3814
3815 let bracket_range = open_range.start..=close_range.end;
3816 if !bracket_range.overlaps(&range) {
3817 continue;
3818 }
3819
3820 return Some(BracketMatch {
3821 open_range,
3822 close_range,
3823 newline_only: pattern.newline_only,
3824 });
3825 }
3826 None
3827 })
3828 }
3829
    /// Returns bracket range pairs overlapping or adjacent to `range`.
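    ///
    /// A minimal usage sketch (not from the original source), assuming `snapshot`
    /// is a `BufferSnapshot` whose language has a brackets query:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(10..20) {
    ///     println!("open {:?}, close {:?}", pair.open_range, pair.close_range);
    /// }
    /// ```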
3831 pub fn bracket_ranges<T: ToOffset>(
3832 &self,
3833 range: Range<T>,
3834 ) -> impl Iterator<Item = BracketMatch> + '_ {
3835 // Find bracket pairs that *inclusively* contain the given range.
3836 let range = range.start.to_offset(self).saturating_sub(1)
3837 ..self.len().min(range.end.to_offset(self) + 1);
3838 self.all_bracket_ranges(range)
3839 .filter(|pair| !pair.newline_only)
3840 }
3841
3842 pub fn text_object_ranges<T: ToOffset>(
3843 &self,
3844 range: Range<T>,
3845 options: TreeSitterOptions,
3846 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3847 let range = range.start.to_offset(self).saturating_sub(1)
3848 ..self.len().min(range.end.to_offset(self) + 1);
3849
3850 let mut matches =
3851 self.syntax
3852 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3853 grammar.text_object_config.as_ref().map(|c| &c.query)
3854 });
3855
3856 let configs = matches
3857 .grammars()
3858 .iter()
3859 .map(|grammar| grammar.text_object_config.as_ref())
3860 .collect::<Vec<_>>();
3861
3862 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3863
3864 iter::from_fn(move || {
3865 loop {
3866 while let Some(capture) = captures.pop() {
3867 if capture.0.overlaps(&range) {
3868 return Some(capture);
3869 }
3870 }
3871
3872 let mat = matches.peek()?;
3873
3874 let Some(config) = configs[mat.grammar_index].as_ref() else {
3875 matches.advance();
3876 continue;
3877 };
3878
3879 for capture in mat.captures {
3880 let Some(ix) = config
3881 .text_objects_by_capture_ix
3882 .binary_search_by_key(&capture.index, |e| e.0)
3883 .ok()
3884 else {
3885 continue;
3886 };
3887 let text_object = config.text_objects_by_capture_ix[ix].1;
3888 let byte_range = capture.node.byte_range();
3889
3890 let mut found = false;
3891 for (range, existing) in captures.iter_mut() {
3892 if existing == &text_object {
3893 range.start = range.start.min(byte_range.start);
3894 range.end = range.end.max(byte_range.end);
3895 found = true;
3896 break;
3897 }
3898 }
3899
3900 if !found {
3901 captures.push((byte_range, text_object));
3902 }
3903 }
3904
3905 matches.advance();
3906 }
3907 })
3908 }
3909
    /// Returns enclosing bracket ranges containing the given range.
3911 pub fn enclosing_bracket_ranges<T: ToOffset>(
3912 &self,
3913 range: Range<T>,
3914 ) -> impl Iterator<Item = BracketMatch> + '_ {
3915 let range = range.start.to_offset(self)..range.end.to_offset(self);
3916
3917 self.bracket_ranges(range.clone()).filter(move |pair| {
3918 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3919 })
3920 }
3921
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
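    ///
    /// A minimal usage sketch (not from the original source); the filter shown here is hypothetical:
    ///
    /// ```ignore
    /// // Only consider bracket pairs whose opening delimiter is wider than one byte.
    /// let filter = |open: Range<usize>, _close: Range<usize>| open.len() > 1;
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(10..12, Some(&filter)) {
    ///     println!("enclosed by {open:?} and {close:?}");
    /// }
    /// ```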
3925 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3926 &self,
3927 range: Range<T>,
3928 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3929 ) -> Option<(Range<usize>, Range<usize>)> {
3930 let range = range.start.to_offset(self)..range.end.to_offset(self);
3931
3932 // Get the ranges of the innermost pair of brackets.
3933 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3934
3935 for pair in self.enclosing_bracket_ranges(range.clone()) {
3936 if let Some(range_filter) = range_filter {
3937 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3938 continue;
3939 }
3940 }
3941
3942 let len = pair.close_range.end - pair.open_range.start;
3943
3944 if let Some((existing_open, existing_close)) = &result {
3945 let existing_len = existing_close.end - existing_open.start;
3946 if len > existing_len {
3947 continue;
3948 }
3949 }
3950
3951 result = Some((pair.open_range, pair.close_range));
3952 }
3953
3954 result
3955 }
3956
    /// Returns offset ranges for any matches of the redaction query.
3958 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3959 /// will be run on the relevant section of the buffer.
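    ///
    /// A minimal usage sketch (not from the original source), assuming `snapshot`
    /// is a `BufferSnapshot` whose language has a redactions query:
    ///
    /// ```ignore
    /// // Collect the byte ranges that should be rendered obscured (e.g. values in `.env` files).
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```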
3960 pub fn redacted_ranges<T: ToOffset>(
3961 &self,
3962 range: Range<T>,
3963 ) -> impl Iterator<Item = Range<usize>> + '_ {
3964 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3965 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3966 grammar
3967 .redactions_config
3968 .as_ref()
3969 .map(|config| &config.query)
3970 });
3971
3972 let configs = syntax_matches
3973 .grammars()
3974 .iter()
3975 .map(|grammar| grammar.redactions_config.as_ref())
3976 .collect::<Vec<_>>();
3977
3978 iter::from_fn(move || {
3979 let redacted_range = syntax_matches
3980 .peek()
3981 .and_then(|mat| {
3982 configs[mat.grammar_index].and_then(|config| {
3983 mat.captures
3984 .iter()
3985 .find(|capture| capture.index == config.redaction_capture_ix)
3986 })
3987 })
3988 .map(|mat| mat.node.byte_range());
3989 syntax_matches.advance();
3990 redacted_range
3991 })
3992 }
3993
3994 pub fn injections_intersecting_range<T: ToOffset>(
3995 &self,
3996 range: Range<T>,
3997 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3998 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3999
4000 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4001 grammar
4002 .injection_config
4003 .as_ref()
4004 .map(|config| &config.query)
4005 });
4006
4007 let configs = syntax_matches
4008 .grammars()
4009 .iter()
4010 .map(|grammar| grammar.injection_config.as_ref())
4011 .collect::<Vec<_>>();
4012
4013 iter::from_fn(move || {
4014 let ranges = syntax_matches.peek().and_then(|mat| {
4015 let config = &configs[mat.grammar_index]?;
4016 let content_capture_range = mat.captures.iter().find_map(|capture| {
4017 if capture.index == config.content_capture_ix {
4018 Some(capture.node.byte_range())
4019 } else {
4020 None
4021 }
4022 })?;
4023 let language = self.language_at(content_capture_range.start)?;
4024 Some((content_capture_range, language))
4025 });
4026 syntax_matches.advance();
4027 ranges
4028 })
4029 }
4030
4031 pub fn runnable_ranges(
4032 &self,
4033 offset_range: Range<usize>,
4034 ) -> impl Iterator<Item = RunnableRange> + '_ {
4035 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4036 grammar.runnable_config.as_ref().map(|config| &config.query)
4037 });
4038
4039 let test_configs = syntax_matches
4040 .grammars()
4041 .iter()
4042 .map(|grammar| grammar.runnable_config.as_ref())
4043 .collect::<Vec<_>>();
4044
4045 iter::from_fn(move || {
4046 loop {
4047 let mat = syntax_matches.peek()?;
4048
4049 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4050 let mut run_range = None;
4051 let full_range = mat.captures.iter().fold(
4052 Range {
4053 start: usize::MAX,
4054 end: 0,
4055 },
4056 |mut acc, next| {
4057 let byte_range = next.node.byte_range();
4058 if acc.start > byte_range.start {
4059 acc.start = byte_range.start;
4060 }
4061 if acc.end < byte_range.end {
4062 acc.end = byte_range.end;
4063 }
4064 acc
4065 },
4066 );
4067 if full_range.start > full_range.end {
4068 // We did not find a full spanning range of this match.
4069 return None;
4070 }
4071 let extra_captures: SmallVec<[_; 1]> =
4072 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4073 test_configs
4074 .extra_captures
4075 .get(capture.index as usize)
4076 .cloned()
4077 .and_then(|tag_name| match tag_name {
4078 RunnableCapture::Named(name) => {
4079 Some((capture.node.byte_range(), name))
4080 }
4081 RunnableCapture::Run => {
4082 let _ = run_range.insert(capture.node.byte_range());
4083 None
4084 }
4085 })
4086 }));
4087 let run_range = run_range?;
4088 let tags = test_configs
4089 .query
4090 .property_settings(mat.pattern_index)
4091 .iter()
4092 .filter_map(|property| {
4093 if *property.key == *"tag" {
4094 property
4095 .value
4096 .as_ref()
4097 .map(|value| RunnableTag(value.to_string().into()))
4098 } else {
4099 None
4100 }
4101 })
4102 .collect();
4103 let extra_captures = extra_captures
4104 .into_iter()
4105 .map(|(range, name)| {
4106 (
4107 name.to_string(),
4108 self.text_for_range(range.clone()).collect::<String>(),
4109 )
4110 })
4111 .collect();
4112 // All tags should have the same range.
4113 Some(RunnableRange {
4114 run_range,
4115 full_range,
4116 runnable: Runnable {
4117 tags,
4118 language: mat.language,
4119 buffer: self.remote_id(),
4120 },
4121 extra_captures,
4122 buffer_id: self.remote_id(),
4123 })
4124 });
4125
4126 syntax_matches.advance();
4127 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But if a match did
                    // not contain a run marker, we don't want to end this iterator early; instead,
                    // we loop around and try the next match.
4130 return test_range;
4131 }
4132 }
4133 })
4134 }
4135
    /// Returns selections for remote peers intersecting the given range,
    /// optionally including the local replica's own selections.
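    ///
    /// A minimal usage sketch (not from the original source):
    ///
    /// ```ignore
    /// // Print every remote collaborator's cursor positions within the whole buffer.
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         println!("{replica_id:?}: {:?}", selection.head());
    ///     }
    /// }
    /// ```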
4137 #[allow(clippy::type_complexity)]
4138 pub fn selections_in_range(
4139 &self,
4140 range: Range<Anchor>,
4141 include_local: bool,
4142 ) -> impl Iterator<
4143 Item = (
4144 ReplicaId,
4145 bool,
4146 CursorShape,
4147 impl Iterator<Item = &Selection<Anchor>> + '_,
4148 ),
4149 > + '_ {
4150 self.remote_selections
4151 .iter()
4152 .filter(move |(replica_id, set)| {
4153 (include_local || **replica_id != self.text.replica_id())
4154 && !set.selections.is_empty()
4155 })
4156 .map(move |(replica_id, set)| {
4157 let start_ix = match set.selections.binary_search_by(|probe| {
4158 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4159 }) {
4160 Ok(ix) | Err(ix) => ix,
4161 };
4162 let end_ix = match set.selections.binary_search_by(|probe| {
4163 probe.start.cmp(&range.end, self).then(Ordering::Less)
4164 }) {
4165 Ok(ix) | Err(ix) => ix,
4166 };
4167
4168 (
4169 *replica_id,
4170 set.line_mode,
4171 set.cursor_shape,
4172 set.selections[start_ix..end_ix].iter(),
4173 )
4174 })
4175 }
4176
    /// Returns whether the buffer contains any diagnostics.
4178 pub fn has_diagnostics(&self) -> bool {
4179 !self.diagnostics.is_empty()
4180 }
4181
4182 /// Returns all the diagnostics intersecting the given range.
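    ///
    /// A minimal usage sketch (not from the original source), resolving the entries to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?} at {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
    /// }
    /// ```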
4183 pub fn diagnostics_in_range<'a, T, O>(
4184 &'a self,
4185 search_range: Range<T>,
4186 reversed: bool,
4187 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4188 where
4189 T: 'a + Clone + ToOffset,
4190 O: 'a + FromAnchor,
4191 {
4192 let mut iterators: Vec<_> = self
4193 .diagnostics
4194 .iter()
4195 .map(|(_, collection)| {
4196 collection
4197 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4198 .peekable()
4199 })
4200 .collect();
4201
4202 std::iter::from_fn(move || {
4203 let (next_ix, _) = iterators
4204 .iter_mut()
4205 .enumerate()
4206 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4207 .min_by(|(_, a), (_, b)| {
4208 let cmp = a
4209 .range
4210 .start
4211 .cmp(&b.range.start, self)
4212 // when range is equal, sort by diagnostic severity
4213 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4214 // and stabilize order with group_id
4215 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4216 if reversed { cmp.reverse() } else { cmp }
4217 })?;
4218 iterators[next_ix]
4219 .next()
4220 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4221 diagnostic,
4222 range: FromAnchor::from_anchor(&range.start, self)
4223 ..FromAnchor::from_anchor(&range.end, self),
4224 })
4225 })
4226 }
4227
4228 /// Returns all the diagnostic groups associated with the given
4229 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
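    ///
    /// A minimal usage sketch (not from the original source):
    ///
    /// ```ignore
    /// // Fetch groups from all language servers and print each group's primary entry.
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```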
4231 pub fn diagnostic_groups(
4232 &self,
4233 language_server_id: Option<LanguageServerId>,
4234 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4235 let mut groups = Vec::new();
4236
4237 if let Some(language_server_id) = language_server_id {
4238 if let Ok(ix) = self
4239 .diagnostics
4240 .binary_search_by_key(&language_server_id, |e| e.0)
4241 {
4242 self.diagnostics[ix]
4243 .1
4244 .groups(language_server_id, &mut groups, self);
4245 }
4246 } else {
4247 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4248 diagnostics.groups(*language_server_id, &mut groups, self);
4249 }
4250 }
4251
4252 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4253 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4254 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4255 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4256 });
4257
4258 groups
4259 }
4260
4261 /// Returns an iterator over the diagnostics for the given group.
4262 pub fn diagnostic_group<O>(
4263 &self,
4264 group_id: usize,
4265 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4266 where
4267 O: FromAnchor + 'static,
4268 {
4269 self.diagnostics
4270 .iter()
4271 .flat_map(move |(_, set)| set.group(group_id, self))
4272 }
4273
4274 /// An integer version number that accounts for all updates besides
4275 /// the buffer's text itself (which is versioned via a version vector).
4276 pub fn non_text_state_update_count(&self) -> usize {
4277 self.non_text_state_update_count
4278 }
4279
    /// Returns a snapshot of the underlying file.
4281 pub fn file(&self) -> Option<&Arc<dyn File>> {
4282 self.file.as_ref()
4283 }
4284
4285 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
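    ///
    /// A minimal usage sketch (not from the original source), assuming `cx: &App` is available:
    ///
    /// ```ignore
    /// // Prefer the worktree-relative path; include the worktree root only when asked.
    /// if let Some(path) = snapshot.resolve_file_path(cx, false) {
    ///     println!("{}", path.display());
    /// }
    /// ```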
    pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
        let file = self.file()?;
        if file.path().file_name().is_none() || include_root {
            Some(file.full_path(cx))
        } else {
            Some(file.path().to_path_buf())
        }
    }
4297
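    /// Returns the distinct words within the queried range, keyed by their text and
    /// mapped to their anchor ranges, optionally filtered to words that fuzzy-match
    /// [`WordsQuery::fuzzy_contents`].
    ///
    /// A minimal usage sketch (not from the original source), assuming `snapshot`
    /// is a `BufferSnapshot`:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     println!("{word}");
    /// }
    /// ```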
4298 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4299 let query_str = query.fuzzy_contents;
4300 if query_str.map_or(false, |query| query.is_empty()) {
4301 return BTreeMap::default();
4302 }
4303
4304 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4305 language,
4306 override_id: None,
4307 }));
4308
4309 let mut query_ix = 0;
4310 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4311 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4312
4313 let mut words = BTreeMap::default();
4314 let mut current_word_start_ix = None;
4315 let mut chunk_ix = query.range.start;
4316 for chunk in self.chunks(query.range, false) {
4317 for (i, c) in chunk.text.char_indices() {
4318 let ix = chunk_ix + i;
4319 if classifier.is_word(c) {
4320 if current_word_start_ix.is_none() {
4321 current_word_start_ix = Some(ix);
4322 }
4323
4324 if let Some(query_chars) = &query_chars {
4325 if query_ix < query_len {
4326 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4327 query_ix += 1;
4328 }
4329 }
4330 }
4331 continue;
4332 } else if let Some(word_start) = current_word_start_ix.take() {
4333 if query_ix == query_len {
4334 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4335 let mut word_text = self.text_for_range(word_start..ix).peekable();
4336 let first_char = word_text
4337 .peek()
4338 .and_then(|first_chunk| first_chunk.chars().next());
                        // Skip "words" that start with a digit, as a heuristic to reduce useless completions.
4340 if !query.skip_digits
4341 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4342 {
4343 words.insert(word_text.collect(), word_range);
4344 }
4345 }
4346 }
4347 query_ix = 0;
4348 }
4349 chunk_ix += chunk.text.len();
4350 }
4351
4352 words
4353 }
4354}
4355
4356pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the fuzzy string's characters, in order (case-insensitive).
4358 pub fuzzy_contents: Option<&'a str>,
4359 /// Skips words that start with a digit.
4360 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4362 pub range: Range<usize>,
4363}
4364
4365fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4366 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4367}
4368
4369fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4370 let mut result = IndentSize::spaces(0);
4371 for c in text {
4372 let kind = match c {
4373 ' ' => IndentKind::Space,
4374 '\t' => IndentKind::Tab,
4375 _ => break,
4376 };
4377 if result.len == 0 {
4378 result.kind = kind;
4379 }
4380 result.len += 1;
4381 }
4382 result
4383}
4384
4385impl Clone for BufferSnapshot {
4386 fn clone(&self) -> Self {
4387 Self {
4388 text: self.text.clone(),
4389 syntax: self.syntax.clone(),
4390 file: self.file.clone(),
4391 remote_selections: self.remote_selections.clone(),
4392 diagnostics: self.diagnostics.clone(),
4393 language: self.language.clone(),
4394 non_text_state_update_count: self.non_text_state_update_count,
4395 }
4396 }
4397}
4398
4399impl Deref for BufferSnapshot {
4400 type Target = text::BufferSnapshot;
4401
4402 fn deref(&self) -> &Self::Target {
4403 &self.text
4404 }
4405}
4406
4407unsafe impl Send for BufferChunks<'_> {}
4408
4409impl<'a> BufferChunks<'a> {
4410 pub(crate) fn new(
4411 text: &'a Rope,
4412 range: Range<usize>,
4413 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4414 diagnostics: bool,
4415 buffer_snapshot: Option<&'a BufferSnapshot>,
4416 ) -> Self {
4417 let mut highlights = None;
4418 if let Some((captures, highlight_maps)) = syntax {
4419 highlights = Some(BufferChunkHighlights {
4420 captures,
4421 next_capture: None,
4422 stack: Default::default(),
4423 highlight_maps,
4424 })
4425 }
4426
4427 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4428 let chunks = text.chunks_in_range(range.clone());
4429
4430 let mut this = BufferChunks {
4431 range,
4432 buffer_snapshot,
4433 chunks,
4434 diagnostic_endpoints,
4435 error_depth: 0,
4436 warning_depth: 0,
4437 information_depth: 0,
4438 hint_depth: 0,
4439 unnecessary_depth: 0,
4440 underline: true,
4441 highlights,
4442 };
4443 this.initialize_diagnostic_endpoints();
4444 this
4445 }
4446
    /// Seeks to the given byte range in the buffer.
4448 pub fn seek(&mut self, range: Range<usize>) {
4449 let old_range = std::mem::replace(&mut self.range, range.clone());
4450 self.chunks.set_range(self.range.clone());
4451 if let Some(highlights) = self.highlights.as_mut() {
4452 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4453 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4454 highlights
4455 .stack
4456 .retain(|(end_offset, _)| *end_offset > range.start);
4457 if let Some(capture) = &highlights.next_capture {
4458 if range.start >= capture.node.start_byte() {
4459 let next_capture_end = capture.node.end_byte();
4460 if range.start < next_capture_end {
4461 highlights.stack.push((
4462 next_capture_end,
4463 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4464 ));
4465 }
4466 highlights.next_capture.take();
4467 }
4468 }
4469 } else if let Some(snapshot) = self.buffer_snapshot {
4470 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4471 *highlights = BufferChunkHighlights {
4472 captures,
4473 next_capture: None,
4474 stack: Default::default(),
4475 highlight_maps,
4476 };
4477 } else {
4478 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4479 // Seeking such BufferChunks is not supported.
4480 debug_assert!(
4481 false,
4482 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4483 );
4484 }
4485
4486 highlights.captures.set_byte_range(self.range.clone());
4487 self.initialize_diagnostic_endpoints();
4488 }
4489 }
4490
4491 fn initialize_diagnostic_endpoints(&mut self) {
4492 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4493 if let Some(buffer) = self.buffer_snapshot {
4494 let mut diagnostic_endpoints = Vec::new();
4495 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4496 diagnostic_endpoints.push(DiagnosticEndpoint {
4497 offset: entry.range.start,
4498 is_start: true,
4499 severity: entry.diagnostic.severity,
4500 is_unnecessary: entry.diagnostic.is_unnecessary,
4501 underline: entry.diagnostic.underline,
4502 });
4503 diagnostic_endpoints.push(DiagnosticEndpoint {
4504 offset: entry.range.end,
4505 is_start: false,
4506 severity: entry.diagnostic.severity,
4507 is_unnecessary: entry.diagnostic.is_unnecessary,
4508 underline: entry.diagnostic.underline,
4509 });
4510 }
4511 diagnostic_endpoints
4512 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4513 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4514 self.hint_depth = 0;
4515 self.error_depth = 0;
4516 self.warning_depth = 0;
4517 self.information_depth = 0;
4518 }
4519 }
4520 }
4521
4522 /// The current byte offset in the buffer.
4523 pub fn offset(&self) -> usize {
4524 self.range.start
4525 }
4526
4527 pub fn range(&self) -> Range<usize> {
4528 self.range.clone()
4529 }
4530
4531 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4532 let depth = match endpoint.severity {
4533 DiagnosticSeverity::ERROR => &mut self.error_depth,
4534 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4535 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4536 DiagnosticSeverity::HINT => &mut self.hint_depth,
4537 _ => return,
4538 };
4539 if endpoint.is_start {
4540 *depth += 1;
4541 } else {
4542 *depth -= 1;
4543 }
4544
4545 if endpoint.is_unnecessary {
4546 if endpoint.is_start {
4547 self.unnecessary_depth += 1;
4548 } else {
4549 self.unnecessary_depth -= 1;
4550 }
4551 }
4552 }
4553
4554 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4555 if self.error_depth > 0 {
4556 Some(DiagnosticSeverity::ERROR)
4557 } else if self.warning_depth > 0 {
4558 Some(DiagnosticSeverity::WARNING)
4559 } else if self.information_depth > 0 {
4560 Some(DiagnosticSeverity::INFORMATION)
4561 } else if self.hint_depth > 0 {
4562 Some(DiagnosticSeverity::HINT)
4563 } else {
4564 None
4565 }
4566 }
4567
4568 fn current_code_is_unnecessary(&self) -> bool {
4569 self.unnecessary_depth > 0
4570 }
4571}
4572
4573impl<'a> Iterator for BufferChunks<'a> {
4574 type Item = Chunk<'a>;
4575
4576 fn next(&mut self) -> Option<Self::Item> {
4577 let mut next_capture_start = usize::MAX;
4578 let mut next_diagnostic_endpoint = usize::MAX;
4579
4580 if let Some(highlights) = self.highlights.as_mut() {
4581 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4582 if *parent_capture_end <= self.range.start {
4583 highlights.stack.pop();
4584 } else {
4585 break;
4586 }
4587 }
4588
4589 if highlights.next_capture.is_none() {
4590 highlights.next_capture = highlights.captures.next();
4591 }
4592
4593 while let Some(capture) = highlights.next_capture.as_ref() {
4594 if self.range.start < capture.node.start_byte() {
4595 next_capture_start = capture.node.start_byte();
4596 break;
4597 } else {
4598 let highlight_id =
4599 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4600 highlights
4601 .stack
4602 .push((capture.node.end_byte(), highlight_id));
4603 highlights.next_capture = highlights.captures.next();
4604 }
4605 }
4606 }
4607
4608 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4609 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4610 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4611 if endpoint.offset <= self.range.start {
4612 self.update_diagnostic_depths(endpoint);
4613 diagnostic_endpoints.next();
4614 self.underline = endpoint.underline;
4615 } else {
4616 next_diagnostic_endpoint = endpoint.offset;
4617 break;
4618 }
4619 }
4620 }
4621 self.diagnostic_endpoints = diagnostic_endpoints;
4622
4623 if let Some(chunk) = self.chunks.peek() {
4624 let chunk_start = self.range.start;
4625 let mut chunk_end = (self.chunks.offset() + chunk.len())
4626 .min(next_capture_start)
4627 .min(next_diagnostic_endpoint);
4628 let mut highlight_id = None;
4629 if let Some(highlights) = self.highlights.as_ref() {
4630 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4631 chunk_end = chunk_end.min(*parent_capture_end);
4632 highlight_id = Some(*parent_highlight_id);
4633 }
4634 }
4635
4636 let slice =
4637 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4638 self.range.start = chunk_end;
4639 if self.range.start == self.chunks.offset() + chunk.len() {
4640 self.chunks.next().unwrap();
4641 }
4642
4643 Some(Chunk {
4644 text: slice,
4645 syntax_highlight_id: highlight_id,
4646 underline: self.underline,
4647 diagnostic_severity: self.current_diagnostic_severity(),
4648 is_unnecessary: self.current_code_is_unnecessary(),
4649 ..Chunk::default()
4650 })
4651 } else {
4652 None
4653 }
4654 }
4655}
4656
4657impl operation_queue::Operation for Operation {
4658 fn lamport_timestamp(&self) -> clock::Lamport {
4659 match self {
4660 Operation::Buffer(_) => {
4661 unreachable!("buffer operations should never be deferred at this layer")
4662 }
4663 Operation::UpdateDiagnostics {
4664 lamport_timestamp, ..
4665 }
4666 | Operation::UpdateSelections {
4667 lamport_timestamp, ..
4668 }
4669 | Operation::UpdateCompletionTriggers {
4670 lamport_timestamp, ..
4671 } => *lamport_timestamp,
4672 }
4673 }
4674}
4675
4676impl Default for Diagnostic {
4677 fn default() -> Self {
4678 Self {
4679 source: Default::default(),
4680 source_kind: DiagnosticSourceKind::Other,
4681 code: None,
4682 code_description: None,
4683 severity: DiagnosticSeverity::ERROR,
4684 message: Default::default(),
4685 markdown: None,
4686 group_id: 0,
4687 is_primary: false,
4688 is_disk_based: false,
4689 is_unnecessary: false,
4690 underline: true,
4691 data: None,
4692 }
4693 }
4694}
4695
4696impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4698 pub fn spaces(len: u32) -> Self {
4699 Self {
4700 len,
4701 kind: IndentKind::Space,
4702 }
4703 }
4704
4705 /// Returns an [`IndentSize`] representing a tab.
4706 pub fn tab() -> Self {
4707 Self {
4708 len: 1,
4709 kind: IndentKind::Tab,
4710 }
4711 }
4712
4713 /// An iterator over the characters represented by this [`IndentSize`].
4714 pub fn chars(&self) -> impl Iterator<Item = char> {
4715 iter::repeat(self.char()).take(self.len as usize)
4716 }
4717
4718 /// The character representation of this [`IndentSize`].
4719 pub fn char(&self) -> char {
4720 match self.kind {
4721 IndentKind::Space => ' ',
4722 IndentKind::Tab => '\t',
4723 }
4724 }
4725
4726 /// Consumes the current [`IndentSize`] and returns a new one that has
4727 /// been shrunk or enlarged by the given size along the given direction.
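    ///
    /// A minimal sketch of the expected behavior (not from the original source):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Grow a 4-space indent by another 4 spaces, then shrink it back.
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 4);
    /// ```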
4728 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4729 match direction {
4730 Ordering::Less => {
4731 if self.kind == size.kind && self.len >= size.len {
4732 self.len -= size.len;
4733 }
4734 }
4735 Ordering::Equal => {}
4736 Ordering::Greater => {
4737 if self.len == 0 {
4738 self = size;
4739 } else if self.kind == size.kind {
4740 self.len += size.len;
4741 }
4742 }
4743 }
4744 self
4745 }
4746
4747 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4748 match self.kind {
4749 IndentKind::Space => self.len as usize,
4750 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4751 }
4752 }
4753}
4754
4755#[cfg(any(test, feature = "test-support"))]
4756pub struct TestFile {
4757 pub path: Arc<Path>,
4758 pub root_name: String,
4759 pub local_root: Option<PathBuf>,
4760}
4761
4762#[cfg(any(test, feature = "test-support"))]
4763impl File for TestFile {
4764 fn path(&self) -> &Arc<Path> {
4765 &self.path
4766 }
4767
4768 fn full_path(&self, _: &gpui::App) -> PathBuf {
4769 PathBuf::from(&self.root_name).join(self.path.as_ref())
4770 }
4771
4772 fn as_local(&self) -> Option<&dyn LocalFile> {
4773 if self.local_root.is_some() {
4774 Some(self)
4775 } else {
4776 None
4777 }
4778 }
4779
4780 fn disk_state(&self) -> DiskState {
4781 unimplemented!()
4782 }
4783
4784 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4785 self.path().file_name().unwrap_or(self.root_name.as_ref())
4786 }
4787
4788 fn worktree_id(&self, _: &App) -> WorktreeId {
4789 WorktreeId::from_usize(0)
4790 }
4791
4792 fn to_proto(&self, _: &App) -> rpc::proto::File {
4793 unimplemented!()
4794 }
4795
4796 fn is_private(&self) -> bool {
4797 false
4798 }
4799}
4800
4801#[cfg(any(test, feature = "test-support"))]
4802impl LocalFile for TestFile {
4803 fn abs_path(&self, _cx: &App) -> PathBuf {
4804 PathBuf::from(self.local_root.as_ref().unwrap())
4805 .join(&self.root_name)
4806 .join(self.path.as_ref())
4807 }
4808
4809 fn load(&self, _cx: &App) -> Task<Result<String>> {
4810 unimplemented!()
4811 }
4812
4813 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4814 unimplemented!()
4815 }
4816}
4817
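/// Groups consecutive values from an iterator into contiguous ranges, splitting
/// any run that would otherwise exceed `max_len`.
///
/// A minimal sketch of the expected behavior (not from the original source):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 7, 8].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 7..9]);
/// ```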
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
4823 let mut current_range: Option<Range<u32>> = None;
4824 std::iter::from_fn(move || {
4825 loop {
4826 if let Some(value) = values.next() {
4827 if let Some(range) = &mut current_range {
4828 if value == range.end && range.len() < max_len {
4829 range.end += 1;
4830 continue;
4831 }
4832 }
4833
4834 let prev_range = current_range.clone();
4835 current_range = Some(value..(value + 1));
4836 if prev_range.is_some() {
4837 return prev_range;
4838 }
4839 } else {
4840 return current_range.take();
4841 }
4842 }
4843 })
4844}
4845
4846#[derive(Default, Debug)]
4847pub struct CharClassifier {
4848 scope: Option<LanguageScope>,
4849 for_completion: bool,
4850 ignore_punctuation: bool,
4851}
4852
4853impl CharClassifier {
4854 pub fn new(scope: Option<LanguageScope>) -> Self {
4855 Self {
4856 scope,
4857 for_completion: false,
4858 ignore_punctuation: false,
4859 }
4860 }
4861
4862 pub fn for_completion(self, for_completion: bool) -> Self {
4863 Self {
4864 for_completion,
4865 ..self
4866 }
4867 }
4868
4869 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4870 Self {
4871 ignore_punctuation,
4872 ..self
4873 }
4874 }
4875
4876 pub fn is_whitespace(&self, c: char) -> bool {
4877 self.kind(c) == CharKind::Whitespace
4878 }
4879
4880 pub fn is_word(&self, c: char) -> bool {
4881 self.kind(c) == CharKind::Word
4882 }
4883
4884 pub fn is_punctuation(&self, c: char) -> bool {
4885 self.kind(c) == CharKind::Punctuation
4886 }
4887
4888 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4889 if c.is_alphanumeric() || c == '_' {
4890 return CharKind::Word;
4891 }
4892
4893 if let Some(scope) = &self.scope {
4894 let characters = if self.for_completion {
4895 scope.completion_query_characters()
4896 } else {
4897 scope.word_characters()
4898 };
4899 if let Some(characters) = characters {
4900 if characters.contains(&c) {
4901 return CharKind::Word;
4902 }
4903 }
4904 }
4905
4906 if c.is_whitespace() {
4907 return CharKind::Whitespace;
4908 }
4909
4910 if ignore_punctuation {
4911 CharKind::Word
4912 } else {
4913 CharKind::Punctuation
4914 }
4915 }
4916
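    /// Classifies a character using this classifier's `ignore_punctuation` setting.
    ///
    /// A minimal usage sketch (not from the original source):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind('x'), CharKind::Word);
    /// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
    /// assert_eq!(classifier.kind('!'), CharKind::Punctuation);
    /// ```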
4917 pub fn kind(&self, c: char) -> CharKind {
4918 self.kind_with(c, self.ignore_punctuation)
4919 }
4920}
4921
4922/// Find all of the ranges of whitespace that occur at the ends of lines
4923/// in the given rope.
4924///
4925/// This could also be done with a regex search, but this implementation
4926/// avoids copying text.
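///
/// A minimal sketch of the expected behavior (not from the original source),
/// assuming `Rope` implements `From<&str>`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n}\t\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 16..17]);
/// ```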
4927pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4928 let mut ranges = Vec::new();
4929
4930 let mut offset = 0;
4931 let mut prev_chunk_trailing_whitespace_range = 0..0;
4932 for chunk in rope.chunks() {
4933 let mut prev_line_trailing_whitespace_range = 0..0;
4934 for (i, line) in chunk.split('\n').enumerate() {
4935 let line_end_offset = offset + line.len();
4936 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4937 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4938
4939 if i == 0 && trimmed_line_len == 0 {
4940 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4941 }
4942 if !prev_line_trailing_whitespace_range.is_empty() {
4943 ranges.push(prev_line_trailing_whitespace_range);
4944 }
4945
4946 offset = line_end_offset + 1;
4947 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4948 }
4949
4950 offset -= 1;
4951 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4952 }
4953
4954 if !prev_chunk_trailing_whitespace_range.is_empty() {
4955 ranges.push(prev_chunk_trailing_whitespace_range);
4956 }
4957
4958 ranges
4959}