1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
    /// The result ID received the last time diagnostics were pulled for this buffer.
131 pull_diagnostics_result_id: Option<String>,
132}
133
134#[derive(Copy, Clone, Debug, PartialEq, Eq)]
135pub enum ParseStatus {
136 Idle,
137 Parsing,
138}
139
140struct BufferBranchState {
141 base_buffer: Entity<Buffer>,
142 merged_operations: Vec<Lamport>,
143}
144
145/// An immutable, cheaply cloneable representation of a fixed
146/// state of a buffer.
147pub struct BufferSnapshot {
148 pub text: text::BufferSnapshot,
149 pub(crate) syntax: SyntaxSnapshot,
150 file: Option<Arc<dyn File>>,
151 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
152 remote_selections: TreeMap<ReplicaId, SelectionSet>,
153 language: Option<Arc<Language>>,
154 non_text_state_update_count: usize,
155}
156
157/// The kind and amount of indentation in a particular line. For now,
158/// assumes that indentation is all the same character.
159#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
160pub struct IndentSize {
161 /// The number of bytes that comprise the indentation.
162 pub len: u32,
163 /// The kind of whitespace used for indentation.
164 pub kind: IndentKind,
165}
166
167/// A whitespace character that's used for indentation.
168#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
169pub enum IndentKind {
170 /// An ASCII space character.
171 #[default]
172 Space,
173 /// An ASCII tab character.
174 Tab,
175}
176
177/// The shape of a selection cursor.
178#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
179#[serde(rename_all = "snake_case")]
180pub enum CursorShape {
181 /// A vertical bar
182 #[default]
183 Bar,
184 /// A block that surrounds the following character
185 Block,
186 /// An underline that runs along the following character
187 Underline,
188 /// A box drawn around the following character
189 Hollow,
190}
191
192#[derive(Clone, Debug)]
193struct SelectionSet {
194 line_mode: bool,
195 cursor_shape: CursorShape,
196 selections: Arc<[Selection<Anchor>]>,
197 lamport_timestamp: clock::Lamport,
198}
199
200/// A diagnostic associated with a certain range of a buffer.
201#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
202pub struct Diagnostic {
203 /// The name of the service that produced this diagnostic.
204 pub source: Option<String>,
205 /// A machine-readable code that identifies this diagnostic.
206 pub code: Option<NumberOrString>,
    /// An optional URL with more information about this diagnostic's code.
    pub code_description: Option<lsp::Url>,
208 /// Whether this diagnostic is a hint, warning, or error.
209 pub severity: DiagnosticSeverity,
210 /// The human-readable message associated with this diagnostic.
211 pub message: String,
    /// The human-readable message, in Markdown format, if available.
213 pub markdown: Option<String>,
214 /// An id that identifies the group to which this diagnostic belongs.
215 ///
216 /// When a language server produces a diagnostic with
217 /// one or more associated diagnostics, those diagnostics are all
218 /// assigned a single group ID.
219 pub group_id: usize,
220 /// Whether this diagnostic is the primary diagnostic for its group.
221 ///
222 /// In a given group, the primary diagnostic is the top-level diagnostic
223 /// returned by the language server. The non-primary diagnostics are the
224 /// associated diagnostics.
225 pub is_primary: bool,
226 /// Whether this diagnostic is considered to originate from an analysis of
227 /// files on disk, as opposed to any unsaved buffer contents. This is a
228 /// property of a given diagnostic source, and is configured for a given
229 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
230 /// for the language server.
231 pub is_disk_based: bool,
232 /// Whether this diagnostic marks unnecessary code.
233 pub is_unnecessary: bool,
    /// Allows diagnostics to be quickly grouped by the kind of source that produced them.
235 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. It is passed back to the language server when code actions are requested for this diagnostic.
237 pub data: Option<Value>,
238 /// Whether to underline the corresponding text range in the editor.
239 pub underline: bool,
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
243pub enum DiagnosticSourceKind {
    /// The diagnostic was pulled from the language server by the client.
    Pulled,
    /// The diagnostic was pushed to the client by the language server.
    Pushed,
    /// The diagnostic came from some other source.
    Other,
247}
248
249/// An operation used to synchronize this buffer with its other replicas.
250#[derive(Clone, Debug, PartialEq)]
251pub enum Operation {
252 /// A text operation.
253 Buffer(text::Operation),
254
255 /// An update to the buffer's diagnostics.
256 UpdateDiagnostics {
257 /// The id of the language server that produced the new diagnostics.
258 server_id: LanguageServerId,
259 /// The diagnostics.
260 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
261 /// The buffer's lamport timestamp.
262 lamport_timestamp: clock::Lamport,
263 },
264
265 /// An update to the most recent selections in this buffer.
266 UpdateSelections {
267 /// The selections.
268 selections: Arc<[Selection<Anchor>]>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// Whether the selections are in 'line mode'.
272 line_mode: bool,
273 /// The [`CursorShape`] associated with these selections.
274 cursor_shape: CursorShape,
275 },
276
277 /// An update to the characters that should trigger autocompletion
278 /// for this buffer.
279 UpdateCompletionTriggers {
280 /// The characters that trigger autocompletion.
281 triggers: Vec<String>,
282 /// The buffer's lamport timestamp.
283 lamport_timestamp: clock::Lamport,
284 /// The language server ID.
285 server_id: LanguageServerId,
286 },
287}
288
289/// An event that occurs in a buffer.
290#[derive(Clone, Debug, PartialEq)]
291pub enum BufferEvent {
292 /// The buffer was changed in a way that must be
293 /// propagated to its other replicas.
294 Operation {
295 operation: Operation,
296 is_local: bool,
297 },
298 /// The buffer was edited.
299 Edited,
300 /// The buffer's `dirty` bit changed.
301 DirtyChanged,
302 /// The buffer was saved.
303 Saved,
304 /// The buffer's file was changed on disk.
305 FileHandleChanged,
306 /// The buffer was reloaded.
307 Reloaded,
    /// The buffer needs to be reloaded.
309 ReloadNeeded,
310 /// The buffer's language was changed.
311 LanguageChanged,
312 /// The buffer's syntax trees were updated.
313 Reparsed,
314 /// The buffer's diagnostics were updated.
315 DiagnosticsUpdated,
316 /// The buffer gained or lost editing capabilities.
317 CapabilityChanged,
318 /// The buffer was explicitly requested to close.
319 Closed,
320 /// The buffer was discarded when closing.
321 Discarded,
322}
323
324/// The file associated with a buffer.
325pub trait File: Send + Sync + Any {
326 /// Returns the [`LocalFile`] associated with this file, if the
327 /// file is local.
328 fn as_local(&self) -> Option<&dyn LocalFile>;
329
330 /// Returns whether this file is local.
331 fn is_local(&self) -> bool {
332 self.as_local().is_some()
333 }
334
335 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
336 /// only available in some states, such as modification time.
337 fn disk_state(&self) -> DiskState;
338
339 /// Returns the path of this file relative to the worktree's root directory.
340 fn path(&self) -> &Arc<Path>;
341
342 /// Returns the path of this file relative to the worktree's parent directory (this means it
343 /// includes the name of the worktree's root folder).
344 fn full_path(&self, cx: &App) -> PathBuf;
345
346 /// Returns the last component of this handle's absolute path. If this handle refers to the root
347 /// of its worktree, then this method will return the name of the worktree itself.
348 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
349
350 /// Returns the id of the worktree to which this file belongs.
351 ///
352 /// This is needed for looking up project-specific settings.
353 fn worktree_id(&self, cx: &App) -> WorktreeId;
354
355 /// Converts this file into a protobuf message.
356 fn to_proto(&self, cx: &App) -> rpc::proto::File;
357
    /// Returns whether Zed considers this to be a private file.
359 fn is_private(&self) -> bool;
360}
361
362/// The file's storage status - whether it's stored (`Present`), and if so when it was last
363/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
364/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
365/// indicator for new files.
366#[derive(Copy, Clone, Debug, PartialEq)]
367pub enum DiskState {
368 /// File created in Zed that has not been saved.
369 New,
370 /// File present on the filesystem.
371 Present { mtime: MTime },
372 /// Deleted file that was previously present.
373 Deleted,
374}
375
376impl DiskState {
377 /// Returns the file's last known modification time on disk.
378 pub fn mtime(self) -> Option<MTime> {
379 match self {
380 DiskState::New => None,
381 DiskState::Present { mtime } => Some(mtime),
382 DiskState::Deleted => None,
383 }
384 }
385
386 pub fn exists(&self) -> bool {
387 match self {
388 DiskState::New => false,
389 DiskState::Present { .. } => true,
390 DiskState::Deleted => false,
391 }
392 }
393}
394
395/// The file associated with a buffer, in the case where the file is on the local disk.
396pub trait LocalFile: File {
    /// Returns the absolute path of this file.
398 fn abs_path(&self, cx: &App) -> PathBuf;
399
400 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
401 fn load(&self, cx: &App) -> Task<Result<String>>;
402
403 /// Loads the file's contents from disk.
404 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
405}
406
407/// The auto-indent behavior associated with an editing operation.
408/// For some editing operations, each affected line of text has its
409/// indentation recomputed. For other operations, the entire block
410/// of edited text is adjusted uniformly.
411#[derive(Clone, Debug)]
412pub enum AutoindentMode {
413 /// Indent each line of inserted text.
414 EachLine,
415 /// Apply the same indentation adjustment to all of the lines
416 /// in a given insertion.
417 Block {
418 /// The original indentation column of the first line of each
419 /// insertion, if it has been copied.
420 ///
421 /// Knowing this makes it possible to preserve the relative indentation
422 /// of every line in the insertion from when it was copied.
423 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns.
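        ///
        /// For example (a hypothetical illustration, not taken from the source):
        /// if a block was copied from column 4 (`a = 4`) and its first line is
        /// auto-indented to column 8 (`b = 8`), a line originally at column 6
        /// ends up at column 6 + (8 - 4) = 10.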
427 original_indent_columns: Vec<Option<u32>>,
428 },
429}
430
431#[derive(Clone)]
432struct AutoindentRequest {
433 before_edit: BufferSnapshot,
434 entries: Vec<AutoindentRequestEntry>,
435 is_block_mode: bool,
436 ignore_empty_lines: bool,
437}
438
439#[derive(Debug, Clone)]
440struct AutoindentRequestEntry {
441 /// A range of the buffer whose indentation should be adjusted.
442 range: Range<Anchor>,
443 /// Whether or not these lines should be considered brand new, for the
444 /// purpose of auto-indent. When text is not new, its indentation will
445 /// only be adjusted if the suggested indentation level has *changed*
446 /// since the edit was made.
447 first_line_is_new: bool,
448 indent_size: IndentSize,
449 original_indent_column: Option<u32>,
450}
451
452#[derive(Debug)]
453struct IndentSuggestion {
454 basis_row: u32,
455 delta: Ordering,
456 within_error: bool,
457}
458
459struct BufferChunkHighlights<'a> {
460 captures: SyntaxMapCaptures<'a>,
461 next_capture: Option<SyntaxMapCapture<'a>>,
462 stack: Vec<(usize, HighlightId)>,
463 highlight_maps: Vec<HighlightMap>,
464}
465
466/// An iterator that yields chunks of a buffer's text, along with their
467/// syntax highlights and diagnostic status.
468pub struct BufferChunks<'a> {
469 buffer_snapshot: Option<&'a BufferSnapshot>,
470 range: Range<usize>,
471 chunks: text::Chunks<'a>,
472 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
473 error_depth: usize,
474 warning_depth: usize,
475 information_depth: usize,
476 hint_depth: usize,
477 unnecessary_depth: usize,
478 underline: bool,
479 highlights: Option<BufferChunkHighlights<'a>>,
480}
481
482/// A chunk of a buffer's text, along with its syntax highlight and
483/// diagnostic status.
484#[derive(Clone, Debug, Default)]
485pub struct Chunk<'a> {
486 /// The text of the chunk.
487 pub text: &'a str,
488 /// The syntax highlighting style of the chunk.
489 pub syntax_highlight_id: Option<HighlightId>,
490 /// The highlight style that has been applied to this chunk in
491 /// the editor.
492 pub highlight_style: Option<HighlightStyle>,
493 /// The severity of diagnostic associated with this chunk, if any.
494 pub diagnostic_severity: Option<DiagnosticSeverity>,
495 /// Whether this chunk of text is marked as unnecessary.
496 pub is_unnecessary: bool,
497 /// Whether this chunk of text was originally a tab character.
498 pub is_tab: bool,
    /// Whether this chunk of text is an inlay.
500 pub is_inlay: bool,
501 /// Whether to underline the corresponding text range in the editor.
502 pub underline: bool,
503}
504
505/// A set of edits to a given version of a buffer, computed asynchronously.
506#[derive(Debug)]
507pub struct Diff {
508 pub base_version: clock::Global,
509 pub line_ending: LineEnding,
510 pub edits: Vec<(Range<usize>, Arc<str>)>,
511}
512
513#[derive(Debug, Clone, Copy)]
514pub(crate) struct DiagnosticEndpoint {
515 offset: usize,
516 is_start: bool,
517 underline: bool,
518 severity: DiagnosticSeverity,
519 is_unnecessary: bool,
520}
521
522/// A class of characters, used for characterizing a run of text.
523#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
524pub enum CharKind {
525 /// Whitespace.
526 Whitespace,
527 /// Punctuation.
528 Punctuation,
529 /// Word.
530 Word,
531}
532
/// A runnable is a set of data about a buffer region that can be resolved into a task.
534pub struct Runnable {
535 pub tags: SmallVec<[RunnableTag; 1]>,
536 pub language: Arc<Language>,
537 pub buffer: BufferId,
538}
539
540#[derive(Default, Clone, Debug)]
541pub struct HighlightedText {
542 pub text: SharedString,
543 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
544}
545
546#[derive(Default, Debug)]
547struct HighlightedTextBuilder {
548 pub text: String,
549 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
550}
551
552impl HighlightedText {
553 pub fn from_buffer_range<T: ToOffset>(
554 range: Range<T>,
555 snapshot: &text::BufferSnapshot,
556 syntax_snapshot: &SyntaxSnapshot,
557 override_style: Option<HighlightStyle>,
558 syntax_theme: &SyntaxTheme,
559 ) -> Self {
560 let mut highlighted_text = HighlightedTextBuilder::default();
561 highlighted_text.add_text_from_buffer_range(
562 range,
563 snapshot,
564 syntax_snapshot,
565 override_style,
566 syntax_theme,
567 );
568 highlighted_text.build()
569 }
570
571 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
572 gpui::StyledText::new(self.text.clone())
573 .with_default_highlights(default_style, self.highlights.iter().cloned())
574 }
575
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, along with a boolean indicating whether more
    /// lines follow.
578 pub fn first_line_preview(self) -> (Self, bool) {
579 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
580 let first_line = &self.text[..newline_ix];
581
582 // Trim leading whitespace, unless an edit starts prior to it.
583 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
584 if let Some((first_highlight_range, _)) = self.highlights.first() {
585 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
586 }
587
588 let preview_text = &first_line[preview_start_ix..];
589 let preview_highlights = self
590 .highlights
591 .into_iter()
592 .take_while(|(range, _)| range.start < newline_ix)
593 .filter_map(|(mut range, highlight)| {
594 range.start = range.start.saturating_sub(preview_start_ix);
595 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
596 if range.is_empty() {
597 None
598 } else {
599 Some((range, highlight))
600 }
601 });
602
603 let preview = Self {
604 text: SharedString::new(preview_text),
605 highlights: preview_highlights.collect(),
606 };
607
608 (preview, self.text.len() > newline_ix)
609 }
610}
611
612impl HighlightedTextBuilder {
613 pub fn build(self) -> HighlightedText {
614 HighlightedText {
615 text: self.text.into(),
616 highlights: self.highlights,
617 }
618 }
619
620 pub fn add_text_from_buffer_range<T: ToOffset>(
621 &mut self,
622 range: Range<T>,
623 snapshot: &text::BufferSnapshot,
624 syntax_snapshot: &SyntaxSnapshot,
625 override_style: Option<HighlightStyle>,
626 syntax_theme: &SyntaxTheme,
627 ) {
628 let range = range.to_offset(snapshot);
629 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
630 let start = self.text.len();
631 self.text.push_str(chunk.text);
632 let end = self.text.len();
633
634 if let Some(mut highlight_style) = chunk
635 .syntax_highlight_id
636 .and_then(|id| id.style(syntax_theme))
637 {
638 if let Some(override_style) = override_style {
639 highlight_style.highlight(override_style);
640 }
641 self.highlights.push((start..end, highlight_style));
642 } else if let Some(override_style) = override_style {
643 self.highlights.push((start..end, override_style));
644 }
645 }
646 }
647
648 fn highlighted_chunks<'a>(
649 range: Range<usize>,
650 snapshot: &'a text::BufferSnapshot,
651 syntax_snapshot: &'a SyntaxSnapshot,
652 ) -> BufferChunks<'a> {
653 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
654 grammar.highlights_query.as_ref()
655 });
656
657 let highlight_maps = captures
658 .grammars()
659 .iter()
660 .map(|grammar| grammar.highlight_map())
661 .collect();
662
663 BufferChunks::new(
664 snapshot.as_rope(),
665 range,
666 Some((captures, highlight_maps)),
667 false,
668 None,
669 )
670 }
671}
672
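/// A preview of a set of proposed edits to a buffer: a snapshot of the text
/// before the edits and a snapshot with the edits applied, plus a syntax
/// snapshot for highlighting. Created by [`Buffer::preview_edits`].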
673#[derive(Clone)]
674pub struct EditPreview {
675 old_snapshot: text::BufferSnapshot,
676 applied_edits_snapshot: text::BufferSnapshot,
677 syntax_snapshot: SyntaxSnapshot,
678}
679
680impl EditPreview {
681 pub fn highlight_edits(
682 &self,
683 current_snapshot: &BufferSnapshot,
684 edits: &[(Range<Anchor>, String)],
685 include_deletions: bool,
686 cx: &App,
687 ) -> HighlightedText {
688 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
689 return HighlightedText::default();
690 };
691
692 let mut highlighted_text = HighlightedTextBuilder::default();
693
694 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
695
696 let insertion_highlight_style = HighlightStyle {
697 background_color: Some(cx.theme().status().created_background),
698 ..Default::default()
699 };
700 let deletion_highlight_style = HighlightStyle {
701 background_color: Some(cx.theme().status().deleted_background),
702 ..Default::default()
703 };
704 let syntax_theme = cx.theme().syntax();
705
706 for (range, edit_text) in edits {
707 let edit_new_end_in_preview_snapshot = range
708 .end
709 .bias_right(&self.old_snapshot)
710 .to_offset(&self.applied_edits_snapshot);
711 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
712
713 let unchanged_range_in_preview_snapshot =
714 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
715 if !unchanged_range_in_preview_snapshot.is_empty() {
716 highlighted_text.add_text_from_buffer_range(
717 unchanged_range_in_preview_snapshot,
718 &self.applied_edits_snapshot,
719 &self.syntax_snapshot,
720 None,
721 &syntax_theme,
722 );
723 }
724
725 let range_in_current_snapshot = range.to_offset(current_snapshot);
726 if include_deletions && !range_in_current_snapshot.is_empty() {
727 highlighted_text.add_text_from_buffer_range(
728 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
731 Some(deletion_highlight_style),
732 &syntax_theme,
733 );
734 }
735
736 if !edit_text.is_empty() {
737 highlighted_text.add_text_from_buffer_range(
738 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
739 &self.applied_edits_snapshot,
740 &self.syntax_snapshot,
741 Some(insertion_highlight_style),
742 &syntax_theme,
743 );
744 }
745
746 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
747 }
748
749 highlighted_text.add_text_from_buffer_range(
750 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
751 &self.applied_edits_snapshot,
752 &self.syntax_snapshot,
753 None,
754 &syntax_theme,
755 );
756
757 highlighted_text.build()
758 }
759
760 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
761 let (first, _) = edits.first()?;
762 let (last, _) = edits.last()?;
763
764 let start = first
765 .start
766 .bias_left(&self.old_snapshot)
767 .to_point(&self.applied_edits_snapshot);
768 let end = last
769 .end
770 .bias_right(&self.old_snapshot)
771 .to_point(&self.applied_edits_snapshot);
772
773 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
774 let range = Point::new(start.row, 0)
775 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
776
777 Some(range.to_offset(&self.applied_edits_snapshot))
778 }
779}
780
781#[derive(Clone, Debug, PartialEq, Eq)]
782pub struct BracketMatch {
783 pub open_range: Range<usize>,
784 pub close_range: Range<usize>,
785 pub newline_only: bool,
786}
787
788impl Buffer {
789 /// Create a new buffer with the given base text.
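    ///
    /// A usage sketch (not compiled as a doc-test), assuming a GPUI context `cx`
    /// that can create entities:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```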
790 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
791 Self::build(
792 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
793 None,
794 Capability::ReadWrite,
795 )
796 }
797
798 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
799 pub fn local_normalized(
800 base_text_normalized: Rope,
801 line_ending: LineEnding,
802 cx: &Context<Self>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new_normalized(
806 0,
807 cx.entity_id().as_non_zero_u64().into(),
808 line_ending,
809 base_text_normalized,
810 ),
811 None,
812 Capability::ReadWrite,
813 )
814 }
815
816 /// Create a new buffer that is a replica of a remote buffer.
817 pub fn remote(
818 remote_id: BufferId,
819 replica_id: ReplicaId,
820 capability: Capability,
821 base_text: impl Into<String>,
822 ) -> Self {
823 Self::build(
824 TextBuffer::new(replica_id, remote_id, base_text.into()),
825 None,
826 capability,
827 )
828 }
829
830 /// Create a new buffer that is a replica of a remote buffer, populating its
831 /// state from the given protobuf message.
832 pub fn from_proto(
833 replica_id: ReplicaId,
834 capability: Capability,
835 message: proto::BufferState,
836 file: Option<Arc<dyn File>>,
837 ) -> Result<Self> {
838 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
839 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
840 let mut this = Self::build(buffer, file, capability);
841 this.text.set_line_ending(proto::deserialize_line_ending(
842 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
843 ));
844 this.saved_version = proto::deserialize_version(&message.saved_version);
845 this.saved_mtime = message.saved_mtime.map(|time| time.into());
846 Ok(this)
847 }
848
849 /// Serialize the buffer's state to a protobuf message.
850 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
851 proto::BufferState {
852 id: self.remote_id().into(),
853 file: self.file.as_ref().map(|f| f.to_proto(cx)),
854 base_text: self.base_text().to_string(),
855 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
856 saved_version: proto::serialize_version(&self.saved_version),
857 saved_mtime: self.saved_mtime.map(|time| time.into()),
858 }
859 }
860
861 /// Serialize as protobufs all of the changes to the buffer since the given version.
862 pub fn serialize_ops(
863 &self,
864 since: Option<clock::Global>,
865 cx: &App,
866 ) -> Task<Vec<proto::Operation>> {
867 let mut operations = Vec::new();
868 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
869
870 operations.extend(self.remote_selections.iter().map(|(_, set)| {
871 proto::serialize_operation(&Operation::UpdateSelections {
872 selections: set.selections.clone(),
873 lamport_timestamp: set.lamport_timestamp,
874 line_mode: set.line_mode,
875 cursor_shape: set.cursor_shape,
876 })
877 }));
878
879 for (server_id, diagnostics) in &self.diagnostics {
880 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
881 lamport_timestamp: self.diagnostics_timestamp,
882 server_id: *server_id,
883 diagnostics: diagnostics.iter().cloned().collect(),
884 }));
885 }
886
887 for (server_id, completions) in &self.completion_triggers_per_language_server {
888 operations.push(proto::serialize_operation(
889 &Operation::UpdateCompletionTriggers {
890 triggers: completions.iter().cloned().collect(),
891 lamport_timestamp: self.completion_triggers_timestamp,
892 server_id: *server_id,
893 },
894 ));
895 }
896
897 let text_operations = self.text.operations().clone();
898 cx.background_spawn(async move {
899 let since = since.unwrap_or_default();
900 operations.extend(
901 text_operations
902 .iter()
903 .filter(|(_, op)| !since.observed(op.timestamp()))
904 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
905 );
906 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
907 operations
908 })
909 }
910
911 /// Assign a language to the buffer, returning the buffer.
912 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
913 self.set_language(Some(language), cx);
914 self
915 }
916
917 /// Returns the [`Capability`] of this buffer.
918 pub fn capability(&self) -> Capability {
919 self.capability
920 }
921
922 /// Whether this buffer can only be read.
923 pub fn read_only(&self) -> bool {
924 self.capability == Capability::ReadOnly
925 }
926
927 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
928 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
929 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
930 let snapshot = buffer.snapshot();
931 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
932 Self {
933 saved_mtime,
934 saved_version: buffer.version(),
935 preview_version: buffer.version(),
936 reload_task: None,
937 transaction_depth: 0,
938 was_dirty_before_starting_transaction: None,
939 has_unsaved_edits: Cell::new((buffer.version(), false)),
940 text: buffer,
941 branch_state: None,
942 file,
943 capability,
944 syntax_map,
945 reparse: None,
946 non_text_state_update_count: 0,
947 sync_parse_timeout: Duration::from_millis(1),
948 parse_status: async_watch::channel(ParseStatus::Idle),
949 autoindent_requests: Default::default(),
950 pending_autoindent: Default::default(),
951 language: None,
952 remote_selections: Default::default(),
953 diagnostics: Default::default(),
954 diagnostics_timestamp: Default::default(),
955 completion_triggers: Default::default(),
956 completion_triggers_per_language_server: Default::default(),
957 completion_triggers_timestamp: Default::default(),
958 deferred_ops: OperationQueue::new(),
959 has_conflict: false,
960 pull_diagnostics_result_id: None,
961 change_bits: Default::default(),
962 _subscriptions: Vec::new(),
963 }
964 }
965
966 pub fn build_snapshot(
967 text: Rope,
968 language: Option<Arc<Language>>,
969 language_registry: Option<Arc<LanguageRegistry>>,
970 cx: &mut App,
971 ) -> impl Future<Output = BufferSnapshot> + use<> {
972 let entity_id = cx.reserve_entity::<Self>().entity_id();
973 let buffer_id = entity_id.as_non_zero_u64().into();
974 async move {
975 let text =
976 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
977 let mut syntax = SyntaxMap::new(&text).snapshot();
978 if let Some(language) = language.clone() {
979 let text = text.clone();
980 let language = language.clone();
981 let language_registry = language_registry.clone();
982 syntax.reparse(&text, language_registry, language);
983 }
984 BufferSnapshot {
985 text,
986 syntax,
987 file: None,
988 diagnostics: Default::default(),
989 remote_selections: Default::default(),
990 language,
991 non_text_state_update_count: 0,
992 }
993 }
994 }
995
996 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
997 let entity_id = cx.reserve_entity::<Self>().entity_id();
998 let buffer_id = entity_id.as_non_zero_u64().into();
999 let text =
1000 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1001 let syntax = SyntaxMap::new(&text).snapshot();
1002 BufferSnapshot {
1003 text,
1004 syntax,
1005 file: None,
1006 diagnostics: Default::default(),
1007 remote_selections: Default::default(),
1008 language: None,
1009 non_text_state_update_count: 0,
1010 }
1011 }
1012
1013 #[cfg(any(test, feature = "test-support"))]
1014 pub fn build_snapshot_sync(
1015 text: Rope,
1016 language: Option<Arc<Language>>,
1017 language_registry: Option<Arc<LanguageRegistry>>,
1018 cx: &mut App,
1019 ) -> BufferSnapshot {
1020 let entity_id = cx.reserve_entity::<Self>().entity_id();
1021 let buffer_id = entity_id.as_non_zero_u64().into();
1022 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1023 let mut syntax = SyntaxMap::new(&text).snapshot();
1024 if let Some(language) = language.clone() {
1025 let text = text.clone();
1026 let language = language.clone();
1027 let language_registry = language_registry.clone();
1028 syntax.reparse(&text, language_registry, language);
1029 }
1030 BufferSnapshot {
1031 text,
1032 syntax,
1033 file: None,
1034 diagnostics: Default::default(),
1035 remote_selections: Default::default(),
1036 language,
1037 non_text_state_update_count: 0,
1038 }
1039 }
1040
1041 /// Retrieve a snapshot of the buffer's current state. This is computationally
1042 /// cheap, and allows reading from the buffer on a background thread.
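    ///
    /// A sketch (not compiled as a doc-test) of reading on a background thread,
    /// assuming an `Entity<Buffer>` named `buffer`:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is a self-contained value, so it can be read off the main thread.
    ///     let _line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```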
1043 pub fn snapshot(&self) -> BufferSnapshot {
1044 let text = self.text.snapshot();
1045 let mut syntax_map = self.syntax_map.lock();
1046 syntax_map.interpolate(&text);
1047 let syntax = syntax_map.snapshot();
1048
1049 BufferSnapshot {
1050 text,
1051 syntax,
1052 file: self.file.clone(),
1053 remote_selections: self.remote_selections.clone(),
1054 diagnostics: self.diagnostics.clone(),
1055 language: self.language.clone(),
1056 non_text_state_update_count: self.non_text_state_update_count,
1057 }
1058 }
1059
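    /// Creates a branch of this buffer: a new buffer that starts from this
    /// buffer's current state and continues to receive the base buffer's edits.
    /// Changes made in the branch can later be applied to the base buffer with
    /// [`Buffer::merge_into_base`].
    ///
    /// A usage sketch (not compiled as a doc-test):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// ```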
1060 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1061 let this = cx.entity();
1062 cx.new(|cx| {
1063 let mut branch = Self {
1064 branch_state: Some(BufferBranchState {
1065 base_buffer: this.clone(),
1066 merged_operations: Default::default(),
1067 }),
1068 language: self.language.clone(),
1069 has_conflict: self.has_conflict,
1070 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1071 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1072 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1073 };
1074 if let Some(language_registry) = self.language_registry() {
1075 branch.set_language_registry(language_registry);
1076 }
1077
1078 // Reparse the branch buffer so that we get syntax highlighting immediately.
1079 branch.reparse(cx);
1080
1081 branch
1082 })
1083 }
1084
1085 pub fn preview_edits(
1086 &self,
1087 edits: Arc<[(Range<Anchor>, String)]>,
1088 cx: &App,
1089 ) -> Task<EditPreview> {
1090 let registry = self.language_registry();
1091 let language = self.language().cloned();
1092 let old_snapshot = self.text.snapshot();
1093 let mut branch_buffer = self.text.branch();
1094 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1095 cx.background_spawn(async move {
1096 if !edits.is_empty() {
1097 if let Some(language) = language.clone() {
1098 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1099 }
1100
1101 branch_buffer.edit(edits.iter().cloned());
1102 let snapshot = branch_buffer.snapshot();
1103 syntax_snapshot.interpolate(&snapshot);
1104
1105 if let Some(language) = language {
1106 syntax_snapshot.reparse(&snapshot, registry, language);
1107 }
1108 }
1109 EditPreview {
1110 old_snapshot,
1111 applied_edits_snapshot: branch_buffer.snapshot(),
1112 syntax_snapshot,
1113 }
1114 })
1115 }
1116
1117 /// Applies all of the changes in this buffer that intersect any of the
1118 /// given `ranges` to its base buffer.
1119 ///
1120 /// If `ranges` is empty, then all changes will be applied. This buffer must
1121 /// be a branch buffer to call this method.
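    ///
    /// A usage sketch (not compiled as a doc-test), assuming `branch` was created
    /// with [`Buffer::branch`]:
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```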
1122 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1123 let Some(base_buffer) = self.base_buffer() else {
1124 debug_panic!("not a branch buffer");
1125 return;
1126 };
1127
1128 let mut ranges = if ranges.is_empty() {
1129 &[0..usize::MAX]
1130 } else {
1131 ranges.as_slice()
1132 }
1133 .into_iter()
1134 .peekable();
1135
1136 let mut edits = Vec::new();
1137 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1138 let mut is_included = false;
1139 while let Some(range) = ranges.peek() {
1140 if range.end < edit.new.start {
1141 ranges.next().unwrap();
1142 } else {
1143 if range.start <= edit.new.end {
1144 is_included = true;
1145 }
1146 break;
1147 }
1148 }
1149
1150 if is_included {
1151 edits.push((
1152 edit.old.clone(),
1153 self.text_for_range(edit.new.clone()).collect::<String>(),
1154 ));
1155 }
1156 }
1157
1158 let operation = base_buffer.update(cx, |base_buffer, cx| {
1159 // cx.emit(BufferEvent::DiffBaseChanged);
1160 base_buffer.edit(edits, None, cx)
1161 });
1162
1163 if let Some(operation) = operation {
1164 if let Some(BufferBranchState {
1165 merged_operations, ..
1166 }) = &mut self.branch_state
1167 {
1168 merged_operations.push(operation);
1169 }
1170 }
1171 }
1172
1173 fn on_base_buffer_event(
1174 &mut self,
1175 _: Entity<Buffer>,
1176 event: &BufferEvent,
1177 cx: &mut Context<Self>,
1178 ) {
1179 let BufferEvent::Operation { operation, .. } = event else {
1180 return;
1181 };
1182 let Some(BufferBranchState {
1183 merged_operations, ..
1184 }) = &mut self.branch_state
1185 else {
1186 return;
1187 };
1188
1189 let mut operation_to_undo = None;
1190 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1191 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1192 merged_operations.remove(ix);
1193 operation_to_undo = Some(operation.timestamp);
1194 }
1195 }
1196
1197 self.apply_ops([operation.clone()], cx);
1198
1199 if let Some(timestamp) = operation_to_undo {
1200 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1201 self.undo_operations(counts, cx);
1202 }
1203 }
1204
1205 #[cfg(test)]
1206 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1207 &self.text
1208 }
1209
1210 /// Retrieve a snapshot of the buffer's raw text, without any
1211 /// language-related state like the syntax tree or diagnostics.
1212 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1213 self.text.snapshot()
1214 }
1215
1216 /// The file associated with the buffer, if any.
1217 pub fn file(&self) -> Option<&Arc<dyn File>> {
1218 self.file.as_ref()
1219 }
1220
1221 /// The version of the buffer that was last saved or reloaded from disk.
1222 pub fn saved_version(&self) -> &clock::Global {
1223 &self.saved_version
1224 }
1225
1226 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1227 pub fn saved_mtime(&self) -> Option<MTime> {
1228 self.saved_mtime
1229 }
1230
1231 /// Assign a language to the buffer.
1232 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1233 self.non_text_state_update_count += 1;
1234 self.syntax_map.lock().clear(&self.text);
1235 self.language = language;
1236 self.was_changed();
1237 self.reparse(cx);
1238 cx.emit(BufferEvent::LanguageChanged);
1239 }
1240
1241 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1242 /// other languages if parts of the buffer are written in different languages.
1243 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1244 self.syntax_map
1245 .lock()
1246 .set_language_registry(language_registry);
1247 }
1248
1249 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1250 self.syntax_map.lock().language_registry()
1251 }
1252
1253 /// Assign the buffer a new [`Capability`].
1254 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1255 self.capability = capability;
1256 cx.emit(BufferEvent::CapabilityChanged)
1257 }
1258
1259 /// This method is called to signal that the buffer has been saved.
1260 pub fn did_save(
1261 &mut self,
1262 version: clock::Global,
1263 mtime: Option<MTime>,
1264 cx: &mut Context<Self>,
1265 ) {
1266 self.saved_version = version;
1267 self.has_unsaved_edits
1268 .set((self.saved_version().clone(), false));
1269 self.has_conflict = false;
1270 self.saved_mtime = mtime;
1271 self.was_changed();
1272 cx.emit(BufferEvent::Saved);
1273 cx.notify();
1274 }
1275
1276 /// This method is called to signal that the buffer has been discarded.
1277 pub fn discarded(&self, cx: &mut Context<Self>) {
1278 cx.emit(BufferEvent::Discarded);
1279 cx.notify();
1280 }
1281
1282 /// Reloads the contents of the buffer from disk.
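    ///
    /// A usage sketch (not compiled as a doc-test): the returned receiver yields
    /// the reload transaction, if any, once the reload has completed.
    ///
    /// ```ignore
    /// let reloaded = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// ```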
1283 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1284 let (tx, rx) = futures::channel::oneshot::channel();
1285 let prev_version = self.text.version();
1286 self.reload_task = Some(cx.spawn(async move |this, cx| {
1287 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1288 let file = this.file.as_ref()?.as_local()?;
1289
1290 Some((file.disk_state().mtime(), file.load(cx)))
1291 })?
1292 else {
1293 return Ok(());
1294 };
1295
1296 let new_text = new_text.await?;
1297 let diff = this
1298 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1299 .await;
1300 this.update(cx, |this, cx| {
1301 if this.version() == diff.base_version {
1302 this.finalize_last_transaction();
1303 this.apply_diff(diff, cx);
1304 tx.send(this.finalize_last_transaction().cloned()).ok();
1305 this.has_conflict = false;
1306 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1307 } else {
1308 if !diff.edits.is_empty()
1309 || this
1310 .edits_since::<usize>(&diff.base_version)
1311 .next()
1312 .is_some()
1313 {
1314 this.has_conflict = true;
1315 }
1316
1317 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1318 }
1319
1320 this.reload_task.take();
1321 })
1322 }));
1323 rx
1324 }
1325
1326 /// This method is called to signal that the buffer has been reloaded.
1327 pub fn did_reload(
1328 &mut self,
1329 version: clock::Global,
1330 line_ending: LineEnding,
1331 mtime: Option<MTime>,
1332 cx: &mut Context<Self>,
1333 ) {
1334 self.saved_version = version;
1335 self.has_unsaved_edits
1336 .set((self.saved_version.clone(), false));
1337 self.text.set_line_ending(line_ending);
1338 self.saved_mtime = mtime;
1339 cx.emit(BufferEvent::Reloaded);
1340 cx.notify();
1341 }
1342
1343 /// Updates the [`File`] backing this buffer. This should be called when
1344 /// the file has changed or has been deleted.
1345 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1346 let was_dirty = self.is_dirty();
1347 let mut file_changed = false;
1348
1349 if let Some(old_file) = self.file.as_ref() {
1350 if new_file.path() != old_file.path() {
1351 file_changed = true;
1352 }
1353
1354 let old_state = old_file.disk_state();
1355 let new_state = new_file.disk_state();
1356 if old_state != new_state {
1357 file_changed = true;
1358 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1359 cx.emit(BufferEvent::ReloadNeeded)
1360 }
1361 }
1362 } else {
1363 file_changed = true;
1364 };
1365
1366 self.file = Some(new_file);
1367 if file_changed {
1368 self.was_changed();
1369 self.non_text_state_update_count += 1;
1370 if was_dirty != self.is_dirty() {
1371 cx.emit(BufferEvent::DirtyChanged);
1372 }
1373 cx.emit(BufferEvent::FileHandleChanged);
1374 cx.notify();
1375 }
1376 }
1377
1378 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1379 Some(self.branch_state.as_ref()?.base_buffer.clone())
1380 }
1381
1382 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1383 pub fn language(&self) -> Option<&Arc<Language>> {
1384 self.language.as_ref()
1385 }
1386
1387 /// Returns the [`Language`] at the given location.
1388 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1389 let offset = position.to_offset(self);
1390 self.syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .last()
1394 .map(|info| info.language.clone())
1395 .or_else(|| self.language.clone())
1396 }
1397
1398 /// Returns each [`Language`] for the active syntax layers at the given location.
1399 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1400 let offset = position.to_offset(self);
1401 let mut languages: Vec<Arc<Language>> = self
1402 .syntax_map
1403 .lock()
1404 .layers_for_range(offset..offset, &self.text, false)
1405 .map(|info| info.language.clone())
1406 .collect();
1407
1408 if languages.is_empty() {
1409 if let Some(buffer_language) = self.language() {
1410 languages.push(buffer_language.clone());
1411 }
1412 }
1413
1414 languages
1415 }
1416
1417 /// An integer version number that accounts for all updates besides
1418 /// the buffer's text itself (which is versioned via a version vector).
1419 pub fn non_text_state_update_count(&self) -> usize {
1420 self.non_text_state_update_count
1421 }
1422
1423 /// Whether the buffer is being parsed in the background.
1424 #[cfg(any(test, feature = "test-support"))]
1425 pub fn is_parsing(&self) -> bool {
1426 self.reparse.is_some()
1427 }
1428
1429 /// Indicates whether the buffer contains any regions that may be
1430 /// written in a language that hasn't been loaded yet.
1431 pub fn contains_unknown_injections(&self) -> bool {
1432 self.syntax_map.lock().contains_unknown_injections()
1433 }
1434
1435 #[cfg(test)]
1436 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1437 self.sync_parse_timeout = timeout;
1438 }
1439
1440 /// Called after an edit to synchronize the buffer's main parse tree with
1441 /// the buffer's new underlying state.
1442 ///
1443 /// Locks the syntax map and interpolates the edits since the last reparse
1444 /// into the foreground syntax tree.
1445 ///
1446 /// Then takes a stable snapshot of the syntax map before unlocking it.
1447 /// The snapshot with the interpolated edits is sent to a background thread,
1448 /// where we ask Tree-sitter to perform an incremental parse.
1449 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms,
    /// waiting for the parse to complete. If it completes within that window,
    /// we apply the result synchronously.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and we return with the interpolated tree still
    /// in the foreground. When the background parse finishes, it calls back into
    /// the main thread and assigns the newly parsed state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// of the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1463 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1464 if self.reparse.is_some() {
1465 return;
1466 }
1467 let language = if let Some(language) = self.language.clone() {
1468 language
1469 } else {
1470 return;
1471 };
1472
1473 let text = self.text_snapshot();
1474 let parsed_version = self.version();
1475
1476 let mut syntax_map = self.syntax_map.lock();
1477 syntax_map.interpolate(&text);
1478 let language_registry = syntax_map.language_registry();
1479 let mut syntax_snapshot = syntax_map.snapshot();
1480 drop(syntax_map);
1481
1482 let parse_task = cx.background_spawn({
1483 let language = language.clone();
1484 let language_registry = language_registry.clone();
1485 async move {
1486 syntax_snapshot.reparse(&text, language_registry, language);
1487 syntax_snapshot
1488 }
1489 });
1490
1491 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1492 match cx
1493 .background_executor()
1494 .block_with_timeout(self.sync_parse_timeout, parse_task)
1495 {
1496 Ok(new_syntax_snapshot) => {
1497 self.did_finish_parsing(new_syntax_snapshot, cx);
1498 self.reparse = None;
1499 }
1500 Err(parse_task) => {
1501 self.reparse = Some(cx.spawn(async move |this, cx| {
1502 let new_syntax_map = parse_task.await;
1503 this.update(cx, move |this, cx| {
1504 let grammar_changed =
1505 this.language.as_ref().map_or(true, |current_language| {
1506 !Arc::ptr_eq(&language, current_language)
1507 });
1508 let language_registry_changed = new_syntax_map
1509 .contains_unknown_injections()
1510 && language_registry.map_or(false, |registry| {
1511 registry.version() != new_syntax_map.language_registry_version()
1512 });
1513 let parse_again = language_registry_changed
1514 || grammar_changed
1515 || this.version.changed_since(&parsed_version);
1516 this.did_finish_parsing(new_syntax_map, cx);
1517 this.reparse = None;
1518 if parse_again {
1519 this.reparse(cx);
1520 }
1521 })
1522 .ok();
1523 }));
1524 }
1525 }
1526 }
1527
1528 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1529 self.was_changed();
1530 self.non_text_state_update_count += 1;
1531 self.syntax_map.lock().did_parse(syntax_snapshot);
1532 self.request_autoindent(cx);
1533 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1534 cx.emit(BufferEvent::Reparsed);
1535 cx.notify();
1536 }
1537
1538 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1539 self.parse_status.1.clone()
1540 }
1541
1542 /// Assign to the buffer a set of diagnostics created by a given language server.
1543 pub fn update_diagnostics(
1544 &mut self,
1545 server_id: LanguageServerId,
1546 diagnostics: DiagnosticSet,
1547 cx: &mut Context<Self>,
1548 ) {
1549 let lamport_timestamp = self.text.lamport_clock.tick();
1550 let op = Operation::UpdateDiagnostics {
1551 server_id,
1552 diagnostics: diagnostics.iter().cloned().collect(),
1553 lamport_timestamp,
1554 };
1555 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1556 self.send_operation(op, true, cx);
1557 }
1558
1559 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1560 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1561 return None;
1562 };
1563 Some(&self.diagnostics[idx].1)
1564 }
1565
1566 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1567 if let Some(indent_sizes) = self.compute_autoindents() {
1568 let indent_sizes = cx.background_spawn(indent_sizes);
1569 match cx
1570 .background_executor()
1571 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1572 {
1573 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1574 Err(indent_sizes) => {
1575 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1576 let indent_sizes = indent_sizes.await;
1577 this.update(cx, |this, cx| {
1578 this.apply_autoindents(indent_sizes, cx);
1579 })
1580 .ok();
1581 }));
1582 }
1583 }
1584 } else {
1585 self.autoindent_requests.clear();
1586 }
1587 }
1588
1589 fn compute_autoindents(
1590 &self,
1591 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1592 let max_rows_between_yields = 100;
1593 let snapshot = self.snapshot();
1594 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1595 return None;
1596 }
1597
1598 let autoindent_requests = self.autoindent_requests.clone();
1599 Some(async move {
1600 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1601 for request in autoindent_requests {
1602 // Resolve each edited range to its row in the current buffer and in the
1603 // buffer before this batch of edits.
1604 let mut row_ranges = Vec::new();
1605 let mut old_to_new_rows = BTreeMap::new();
1606 let mut language_indent_sizes_by_new_row = Vec::new();
1607 for entry in &request.entries {
1608 let position = entry.range.start;
1609 let new_row = position.to_point(&snapshot).row;
1610 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1611 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1612
1613 if !entry.first_line_is_new {
1614 let old_row = position.to_point(&request.before_edit).row;
1615 old_to_new_rows.insert(old_row, new_row);
1616 }
1617 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1618 }
1619
1620 // Build a map containing the suggested indentation for each of the edited lines
1621 // with respect to the state of the buffer before these edits. This map is keyed
1622 // by the rows for these lines in the current state of the buffer.
1623 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1624 let old_edited_ranges =
1625 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1626 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1627 let mut language_indent_size = IndentSize::default();
1628 for old_edited_range in old_edited_ranges {
1629 let suggestions = request
1630 .before_edit
1631 .suggest_autoindents(old_edited_range.clone())
1632 .into_iter()
1633 .flatten();
1634 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1635 if let Some(suggestion) = suggestion {
1636 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1637
1638 // Find the indent size based on the language for this row.
1639 while let Some((row, size)) = language_indent_sizes.peek() {
1640 if *row > new_row {
1641 break;
1642 }
1643 language_indent_size = *size;
1644 language_indent_sizes.next();
1645 }
1646
1647 let suggested_indent = old_to_new_rows
1648 .get(&suggestion.basis_row)
1649 .and_then(|from_row| {
1650 Some(old_suggestions.get(from_row).copied()?.0)
1651 })
1652 .unwrap_or_else(|| {
1653 request
1654 .before_edit
1655 .indent_size_for_line(suggestion.basis_row)
1656 })
1657 .with_delta(suggestion.delta, language_indent_size);
1658 old_suggestions
1659 .insert(new_row, (suggested_indent, suggestion.within_error));
1660 }
1661 }
1662 yield_now().await;
1663 }
1664
1665 // Compute new suggestions for each line, but only include them in the result
1666 // if they differ from the old suggestion for that line.
1667 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1668 let mut language_indent_size = IndentSize::default();
1669 for (row_range, original_indent_column) in row_ranges {
1670 let new_edited_row_range = if request.is_block_mode {
1671 row_range.start..row_range.start + 1
1672 } else {
1673 row_range.clone()
1674 };
1675
1676 let suggestions = snapshot
1677 .suggest_autoindents(new_edited_row_range.clone())
1678 .into_iter()
1679 .flatten();
1680 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1681 if let Some(suggestion) = suggestion {
1682 // Find the indent size based on the language for this row.
1683 while let Some((row, size)) = language_indent_sizes.peek() {
1684 if *row > new_row {
1685 break;
1686 }
1687 language_indent_size = *size;
1688 language_indent_sizes.next();
1689 }
1690
1691 let suggested_indent = indent_sizes
1692 .get(&suggestion.basis_row)
1693 .copied()
1694 .map(|e| e.0)
1695 .unwrap_or_else(|| {
1696 snapshot.indent_size_for_line(suggestion.basis_row)
1697 })
1698 .with_delta(suggestion.delta, language_indent_size);
1699
1700 if old_suggestions.get(&new_row).map_or(
1701 true,
1702 |(old_indentation, was_within_error)| {
1703 suggested_indent != *old_indentation
1704 && (!suggestion.within_error || *was_within_error)
1705 },
1706 ) {
1707 indent_sizes.insert(
1708 new_row,
1709 (suggested_indent, request.ignore_empty_lines),
1710 );
1711 }
1712 }
1713 }
1714
1715 if let (true, Some(original_indent_column)) =
1716 (request.is_block_mode, original_indent_column)
1717 {
1718 let new_indent =
1719 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1720 *indent
1721 } else {
1722 snapshot.indent_size_for_line(row_range.start)
1723 };
1724 let delta = new_indent.len as i64 - original_indent_column as i64;
1725 if delta != 0 {
1726 for row in row_range.skip(1) {
1727 indent_sizes.entry(row).or_insert_with(|| {
1728 let mut size = snapshot.indent_size_for_line(row);
1729 if size.kind == new_indent.kind {
1730 match delta.cmp(&0) {
1731 Ordering::Greater => size.len += delta as u32,
1732 Ordering::Less => {
1733 size.len = size.len.saturating_sub(-delta as u32)
1734 }
1735 Ordering::Equal => {}
1736 }
1737 }
1738 (size, request.ignore_empty_lines)
1739 });
1740 }
1741 }
1742 }
1743
1744 yield_now().await;
1745 }
1746 }
1747
1748 indent_sizes
1749 .into_iter()
1750 .filter_map(|(row, (indent, ignore_empty_lines))| {
1751 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1752 None
1753 } else {
1754 Some((row, indent))
1755 }
1756 })
1757 .collect()
1758 })
1759 }
1760
1761 fn apply_autoindents(
1762 &mut self,
1763 indent_sizes: BTreeMap<u32, IndentSize>,
1764 cx: &mut Context<Self>,
1765 ) {
1766 self.autoindent_requests.clear();
1767
1768 let edits: Vec<_> = indent_sizes
1769 .into_iter()
1770 .filter_map(|(row, indent_size)| {
1771 let current_size = indent_size_for_line(self, row);
1772 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1773 })
1774 .collect();
1775
1776 let preserve_preview = self.preserve_preview();
1777 self.edit(edits, None, cx);
1778 if preserve_preview {
1779 self.refresh_preview();
1780 }
1781 }
1782
1783 /// Create a minimal edit that will cause the given row to be indented
1784 /// with the given size. After applying this edit, the length of the line
1785 /// will always be at least `new_size.len`.
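    ///
    /// A rough sketch of the expected output (illustrative, not a doctest): growing a
    /// two-space indent to four spaces on row 3 yields an edit that inserts the two
    /// missing spaces at column 0.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```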
1786 pub fn edit_for_indent_size_adjustment(
1787 row: u32,
1788 current_size: IndentSize,
1789 new_size: IndentSize,
1790 ) -> Option<(Range<Point>, String)> {
1791 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1793 Ordering::Greater => {
1794 let point = Point::new(row, 0);
1795 Some((
1796 point..point,
1797 iter::repeat(new_size.char())
1798 .take((new_size.len - current_size.len) as usize)
1799 .collect::<String>(),
1800 ))
1801 }
1802
1803 Ordering::Less => Some((
1804 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1805 String::new(),
1806 )),
1807
1808 Ordering::Equal => None,
1809 }
1810 } else {
1811 Some((
1812 Point::new(row, 0)..Point::new(row, current_size.len),
1813 iter::repeat(new_size.char())
1814 .take(new_size.len as usize)
1815 .collect::<String>(),
1816 ))
1817 }
1818 }
1819
1820 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1821 /// and the given new text.
1822 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1823 let old_text = self.as_rope().clone();
1824 let base_version = self.version();
1825 cx.background_executor()
1826 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1827 let old_text = old_text.to_string();
1828 let line_ending = LineEnding::detect(&new_text);
1829 LineEnding::normalize(&mut new_text);
1830 let edits = text_diff(&old_text, &new_text);
1831 Diff {
1832 base_version,
1833 line_ending,
1834 edits,
1835 }
1836 })
1837 }
1838
1839 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1841 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1842 let old_text = self.as_rope().clone();
1843 let line_ending = self.line_ending();
1844 let base_version = self.version();
1845 cx.background_spawn(async move {
1846 let ranges = trailing_whitespace_ranges(&old_text);
1847 let empty = Arc::<str>::from("");
1848 Diff {
1849 base_version,
1850 line_ending,
1851 edits: ranges
1852 .into_iter()
1853 .map(|range| (range, empty.clone()))
1854 .collect(),
1855 }
1856 })
1857 }
1858
1859 /// Ensures that the buffer ends with a single newline character, and
1860 /// no other whitespace.
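    ///
    /// A rough sketch of the effect (illustrative, not a doctest; assumes a
    /// `cx: &mut Context<Buffer>` such as one provided by a gpui test):
    ///
    /// ```ignore
    /// buffer.set_text("hello  \n\n", cx);
    /// buffer.ensure_final_newline(cx);
    /// assert_eq!(buffer.text(), "hello\n");
    /// ```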
1861 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1862 let len = self.len();
1863 let mut offset = len;
1864 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1865 let non_whitespace_len = chunk
1866 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1867 .len();
1868 offset -= chunk.len();
1869 offset += non_whitespace_len;
1870 if non_whitespace_len != 0 {
1871 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1872 return;
1873 }
1874 break;
1875 }
1876 }
1877 self.edit([(offset..len, "\n")], None, cx);
1878 }
1879
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of
    /// the diff that conflict with those changes are discarded.
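    ///
    /// A rough sketch of the intended flow together with [`Buffer::diff`] (illustrative,
    /// not a doctest; assumes `buffer` is an `Entity<Buffer>` and `cx` is an async gpui
    /// test context):
    ///
    /// ```ignore
    /// // Compute the diff on a background thread, then apply it back to the buffer.
    /// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx)).await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```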
1883 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1884 let snapshot = self.snapshot();
1885 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1886 let mut delta = 0;
1887 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1888 while let Some(edit_since) = edits_since.peek() {
1889 // If the edit occurs after a diff hunk, then it does not
1890 // affect that hunk.
1891 if edit_since.old.start > range.end {
1892 break;
1893 }
1894 // If the edit precedes the diff hunk, then adjust the hunk
1895 // to reflect the edit.
1896 else if edit_since.old.end < range.start {
1897 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1898 edits_since.next();
1899 }
1900 // If the edit intersects a diff hunk, then discard that hunk.
1901 else {
1902 return None;
1903 }
1904 }
1905
1906 let start = (range.start as i64 + delta) as usize;
1907 let end = (range.end as i64 + delta) as usize;
1908 Some((start..end, new_text))
1909 });
1910
1911 self.start_transaction();
1912 self.text.set_line_ending(diff.line_ending);
1913 self.edit(adjusted_edits, None, cx);
1914 self.end_transaction(cx)
1915 }
1916
1917 fn has_unsaved_edits(&self) -> bool {
1918 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1919
1920 if last_version == self.version {
1921 self.has_unsaved_edits
1922 .set((last_version, has_unsaved_edits));
1923 return has_unsaved_edits;
1924 }
1925
1926 let has_edits = self.has_edits_since(&self.saved_version);
1927 self.has_unsaved_edits
1928 .set((self.version.clone(), has_edits));
1929 has_edits
1930 }
1931
1932 /// Checks if the buffer has unsaved changes.
1933 pub fn is_dirty(&self) -> bool {
1934 if self.capability == Capability::ReadOnly {
1935 return false;
1936 }
1937 if self.has_conflict {
1938 return true;
1939 }
1940 match self.file.as_ref().map(|f| f.disk_state()) {
1941 Some(DiskState::New) | Some(DiskState::Deleted) => {
1942 !self.is_empty() && self.has_unsaved_edits()
1943 }
1944 _ => self.has_unsaved_edits(),
1945 }
1946 }
1947
1948 /// Checks if the buffer and its file have both changed since the buffer
1949 /// was last saved or reloaded.
1950 pub fn has_conflict(&self) -> bool {
1951 if self.has_conflict {
1952 return true;
1953 }
1954 let Some(file) = self.file.as_ref() else {
1955 return false;
1956 };
1957 match file.disk_state() {
1958 DiskState::New => false,
1959 DiskState::Present { mtime } => match self.saved_mtime {
1960 Some(saved_mtime) => {
1961 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1962 }
1963 None => true,
1964 },
1965 DiskState::Deleted => false,
1966 }
1967 }
1968
1969 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1970 pub fn subscribe(&mut self) -> Subscription {
1971 self.text.subscribe()
1972 }
1973
1974 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1975 ///
1976 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
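    ///
    /// A rough sketch of the intended usage (illustrative, not a doctest; `buffer` and
    /// `cx` are assumed to come from a gpui test):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(changed.get());
    /// ```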
1978 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1979 if let Err(ix) = self
1980 .change_bits
1981 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1982 {
1983 self.change_bits.insert(ix, bit);
1984 }
1985 }
1986
1987 fn was_changed(&mut self) {
1988 self.change_bits.retain(|change_bit| {
1989 change_bit.upgrade().map_or(false, |bit| {
1990 bit.replace(true);
1991 true
1992 })
1993 });
1994 }
1995
1996 /// Starts a transaction, if one is not already in-progress. When undoing or
1997 /// redoing edits, all of the edits performed within a transaction are undone
1998 /// or redone together.
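    ///
    /// A rough sketch of grouping two edits so that a single undo reverts both
    /// (illustrative, not a doctest; assumes an initially empty buffer and a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(12..12, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```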
1999 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2000 self.start_transaction_at(Instant::now())
2001 }
2002
2003 /// Starts a transaction, providing the current time. Subsequent transactions
2004 /// that occur within a short period of time will be grouped together. This
2005 /// is controlled by the buffer's undo grouping duration.
2006 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2007 self.transaction_depth += 1;
2008 if self.was_dirty_before_starting_transaction.is_none() {
2009 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2010 }
2011 self.text.start_transaction_at(now)
2012 }
2013
2014 /// Terminates the current transaction, if this is the outermost transaction.
2015 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2016 self.end_transaction_at(Instant::now(), cx)
2017 }
2018
2019 /// Terminates the current transaction, providing the current time. Subsequent transactions
2020 /// that occur within a short period of time will be grouped together. This
2021 /// is controlled by the buffer's undo grouping duration.
2022 pub fn end_transaction_at(
2023 &mut self,
2024 now: Instant,
2025 cx: &mut Context<Self>,
2026 ) -> Option<TransactionId> {
2027 assert!(self.transaction_depth > 0);
2028 self.transaction_depth -= 1;
2029 let was_dirty = if self.transaction_depth == 0 {
2030 self.was_dirty_before_starting_transaction.take().unwrap()
2031 } else {
2032 false
2033 };
2034 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2035 self.did_edit(&start_version, was_dirty, cx);
2036 Some(transaction_id)
2037 } else {
2038 None
2039 }
2040 }
2041
2042 /// Manually add a transaction to the buffer's undo history.
2043 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2044 self.text.push_transaction(transaction, now);
2045 }
2046
2047 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2049 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2050 self.text.finalize_last_transaction()
2051 }
2052
2053 /// Manually group all changes since a given transaction.
2054 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2055 self.text.group_until_transaction(transaction_id);
2056 }
2057
    /// Manually remove a transaction from the buffer's undo history.
2059 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2060 self.text.forget_transaction(transaction_id)
2061 }
2062
    /// Retrieve a transaction from the buffer's undo history.
2064 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2065 self.text.get_transaction(transaction_id)
2066 }
2067
2068 /// Manually merge two transactions in the buffer's undo history.
2069 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2070 self.text.merge_transactions(transaction, destination);
2071 }
2072
2073 /// Waits for the buffer to receive operations with the given timestamps.
2074 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2075 &mut self,
2076 edit_ids: It,
2077 ) -> impl Future<Output = Result<()>> + use<It> {
2078 self.text.wait_for_edits(edit_ids)
2079 }
2080
2081 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2082 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2083 &mut self,
2084 anchors: It,
2085 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2086 self.text.wait_for_anchors(anchors)
2087 }
2088
2089 /// Waits for the buffer to receive operations up to the given version.
2090 pub fn wait_for_version(
2091 &mut self,
2092 version: clock::Global,
2093 ) -> impl Future<Output = Result<()>> + use<> {
2094 self.text.wait_for_version(version)
2095 }
2096
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2099 pub fn give_up_waiting(&mut self) {
2100 self.text.give_up_waiting();
2101 }
2102
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2104 pub fn set_active_selections(
2105 &mut self,
2106 selections: Arc<[Selection<Anchor>]>,
2107 line_mode: bool,
2108 cursor_shape: CursorShape,
2109 cx: &mut Context<Self>,
2110 ) {
2111 let lamport_timestamp = self.text.lamport_clock.tick();
2112 self.remote_selections.insert(
2113 self.text.replica_id(),
2114 SelectionSet {
2115 selections: selections.clone(),
2116 lamport_timestamp,
2117 line_mode,
2118 cursor_shape,
2119 },
2120 );
2121 self.send_operation(
2122 Operation::UpdateSelections {
2123 selections,
2124 line_mode,
2125 lamport_timestamp,
2126 cursor_shape,
2127 },
2128 true,
2129 cx,
2130 );
2131 self.non_text_state_update_count += 1;
2132 cx.notify();
2133 }
2134
2135 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2136 /// this replica.
2137 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2138 if self
2139 .remote_selections
2140 .get(&self.text.replica_id())
2141 .map_or(true, |set| !set.selections.is_empty())
2142 {
2143 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2144 }
2145 }
2146
2147 pub fn set_agent_selections(
2148 &mut self,
2149 selections: Arc<[Selection<Anchor>]>,
2150 line_mode: bool,
2151 cursor_shape: CursorShape,
2152 cx: &mut Context<Self>,
2153 ) {
2154 let lamport_timestamp = self.text.lamport_clock.tick();
2155 self.remote_selections.insert(
2156 AGENT_REPLICA_ID,
2157 SelectionSet {
2158 selections: selections.clone(),
2159 lamport_timestamp,
2160 line_mode,
2161 cursor_shape,
2162 },
2163 );
2164 self.non_text_state_update_count += 1;
2165 cx.notify();
2166 }
2167
2168 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2169 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2170 }
2171
2172 /// Replaces the buffer's entire text.
2173 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2174 where
2175 T: Into<Arc<str>>,
2176 {
2177 self.autoindent_requests.clear();
2178 self.edit([(0..self.len(), text)], None, cx)
2179 }
2180
2181 /// Appends the given text to the end of the buffer.
2182 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2183 where
2184 T: Into<Arc<str>>,
2185 {
2186 self.edit([(self.len()..self.len(), text)], None, cx)
2187 }
2188
2189 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2190 /// delete, and a string of text to insert at that location.
2191 ///
2192 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2193 /// request for the edited ranges, which will be processed when the buffer finishes
2194 /// parsing.
2195 ///
2196 /// Parsing takes place at the end of a transaction, and may compute synchronously
2197 /// or asynchronously, depending on the changes.
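    ///
    /// A rough sketch of an edit with auto-indentation (illustrative, not a doctest;
    /// the offsets assume a buffer containing `"fn main() {}"`):
    ///
    /// ```ignore
    /// // Insert a body inside the braces and let the language's indent rules
    /// // indent the inserted lines.
    /// buffer.edit(
    ///     [(11..11, "\ntodo!();\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```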
2198 pub fn edit<I, S, T>(
2199 &mut self,
2200 edits_iter: I,
2201 autoindent_mode: Option<AutoindentMode>,
2202 cx: &mut Context<Self>,
2203 ) -> Option<clock::Lamport>
2204 where
2205 I: IntoIterator<Item = (Range<S>, T)>,
2206 S: ToOffset,
2207 T: Into<Arc<str>>,
2208 {
2209 // Skip invalid edits and coalesce contiguous ones.
2210 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2211
2212 for (range, new_text) in edits_iter {
2213 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2214
2215 if range.start > range.end {
2216 mem::swap(&mut range.start, &mut range.end);
2217 }
2218 let new_text = new_text.into();
2219 if !new_text.is_empty() || !range.is_empty() {
2220 if let Some((prev_range, prev_text)) = edits.last_mut() {
2221 if prev_range.end >= range.start {
2222 prev_range.end = cmp::max(prev_range.end, range.end);
2223 *prev_text = format!("{prev_text}{new_text}").into();
2224 } else {
2225 edits.push((range, new_text));
2226 }
2227 } else {
2228 edits.push((range, new_text));
2229 }
2230 }
2231 }
2232 if edits.is_empty() {
2233 return None;
2234 }
2235
2236 self.start_transaction();
2237 self.pending_autoindent.take();
2238 let autoindent_request = autoindent_mode
2239 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2240
2241 let edit_operation = self.text.edit(edits.iter().cloned());
2242 let edit_id = edit_operation.timestamp();
2243
2244 if let Some((before_edit, mode)) = autoindent_request {
2245 let mut delta = 0isize;
2246 let entries = edits
2247 .into_iter()
2248 .enumerate()
2249 .zip(&edit_operation.as_edit().unwrap().new_text)
2250 .map(|((ix, (range, _)), new_text)| {
2251 let new_text_length = new_text.len();
2252 let old_start = range.start.to_point(&before_edit);
2253 let new_start = (delta + range.start as isize) as usize;
2254 let range_len = range.end - range.start;
2255 delta += new_text_length as isize - range_len as isize;
2256
2257 // Decide what range of the insertion to auto-indent, and whether
2258 // the first line of the insertion should be considered a newly-inserted line
2259 // or an edit to an existing line.
2260 let mut range_of_insertion_to_indent = 0..new_text_length;
2261 let mut first_line_is_new = true;
2262
2263 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2264 let old_line_end = before_edit.line_len(old_start.row);
2265
2266 if old_start.column > old_line_start {
2267 first_line_is_new = false;
2268 }
2269
2270 if !new_text.contains('\n')
2271 && (old_start.column + (range_len as u32) < old_line_end
2272 || old_line_end == old_line_start)
2273 {
2274 first_line_is_new = false;
2275 }
2276
2277 // When inserting text starting with a newline, avoid auto-indenting the
2278 // previous line.
2279 if new_text.starts_with('\n') {
2280 range_of_insertion_to_indent.start += 1;
2281 first_line_is_new = true;
2282 }
2283
2284 let mut original_indent_column = None;
2285 if let AutoindentMode::Block {
2286 original_indent_columns,
2287 } = &mode
2288 {
2289 original_indent_column = Some(if new_text.starts_with('\n') {
2290 indent_size_for_text(
2291 new_text[range_of_insertion_to_indent.clone()].chars(),
2292 )
2293 .len
2294 } else {
2295 original_indent_columns
2296 .get(ix)
2297 .copied()
2298 .flatten()
2299 .unwrap_or_else(|| {
2300 indent_size_for_text(
2301 new_text[range_of_insertion_to_indent.clone()].chars(),
2302 )
2303 .len
2304 })
2305 });
2306
2307 // Avoid auto-indenting the line after the edit.
2308 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2309 range_of_insertion_to_indent.end -= 1;
2310 }
2311 }
2312
2313 AutoindentRequestEntry {
2314 first_line_is_new,
2315 original_indent_column,
2316 indent_size: before_edit.language_indent_size_at(range.start, cx),
2317 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2318 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2319 }
2320 })
2321 .collect();
2322
2323 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2324 before_edit,
2325 entries,
2326 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2327 ignore_empty_lines: false,
2328 }));
2329 }
2330
2331 self.end_transaction(cx);
2332 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2333 Some(edit_id)
2334 }
2335
2336 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2337 self.was_changed();
2338
2339 if self.edits_since::<usize>(old_version).next().is_none() {
2340 return;
2341 }
2342
2343 self.reparse(cx);
2344 cx.emit(BufferEvent::Edited);
2345 if was_dirty != self.is_dirty() {
2346 cx.emit(BufferEvent::DirtyChanged);
2347 }
2348 cx.notify();
2349 }
2350
2351 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2352 where
2353 I: IntoIterator<Item = Range<T>>,
2354 T: ToOffset + Copy,
2355 {
2356 let before_edit = self.snapshot();
2357 let entries = ranges
2358 .into_iter()
2359 .map(|range| AutoindentRequestEntry {
2360 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2361 first_line_is_new: true,
2362 indent_size: before_edit.language_indent_size_at(range.start, cx),
2363 original_indent_column: None,
2364 })
2365 .collect();
2366 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2367 before_edit,
2368 entries,
2369 is_block_mode: false,
2370 ignore_empty_lines: true,
2371 }));
2372 self.request_autoindent(cx);
2373 }
2374
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2377 pub fn insert_empty_line(
2378 &mut self,
2379 position: impl ToPoint,
2380 space_above: bool,
2381 space_below: bool,
2382 cx: &mut Context<Self>,
2383 ) -> Point {
2384 let mut position = position.to_point(self);
2385
2386 self.start_transaction();
2387
2388 self.edit(
2389 [(position..position, "\n")],
2390 Some(AutoindentMode::EachLine),
2391 cx,
2392 );
2393
2394 if position.column > 0 {
2395 position += Point::new(1, 0);
2396 }
2397
2398 if !self.is_line_blank(position.row) {
2399 self.edit(
2400 [(position..position, "\n")],
2401 Some(AutoindentMode::EachLine),
2402 cx,
2403 );
2404 }
2405
2406 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2407 self.edit(
2408 [(position..position, "\n")],
2409 Some(AutoindentMode::EachLine),
2410 cx,
2411 );
2412 position.row += 1;
2413 }
2414
2415 if space_below
2416 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2417 {
2418 self.edit(
2419 [(position..position, "\n")],
2420 Some(AutoindentMode::EachLine),
2421 cx,
2422 );
2423 }
2424
2425 self.end_transaction(cx);
2426
2427 position
2428 }
2429
2430 /// Applies the given remote operations to the buffer.
2431 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2432 self.pending_autoindent.take();
2433 let was_dirty = self.is_dirty();
2434 let old_version = self.version.clone();
2435 let mut deferred_ops = Vec::new();
2436 let buffer_ops = ops
2437 .into_iter()
2438 .filter_map(|op| match op {
2439 Operation::Buffer(op) => Some(op),
2440 _ => {
2441 if self.can_apply_op(&op) {
2442 self.apply_op(op, cx);
2443 } else {
2444 deferred_ops.push(op);
2445 }
2446 None
2447 }
2448 })
2449 .collect::<Vec<_>>();
2450 for operation in buffer_ops.iter() {
2451 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2452 }
2453 self.text.apply_ops(buffer_ops);
2454 self.deferred_ops.insert(deferred_ops);
2455 self.flush_deferred_ops(cx);
2456 self.did_edit(&old_version, was_dirty, cx);
2457 // Notify independently of whether the buffer was edited as the operations could include a
2458 // selection update.
2459 cx.notify();
2460 }
2461
2462 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2463 let mut deferred_ops = Vec::new();
2464 for op in self.deferred_ops.drain().iter().cloned() {
2465 if self.can_apply_op(&op) {
2466 self.apply_op(op, cx);
2467 } else {
2468 deferred_ops.push(op);
2469 }
2470 }
2471 self.deferred_ops.insert(deferred_ops);
2472 }
2473
2474 pub fn has_deferred_ops(&self) -> bool {
2475 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2476 }
2477
2478 fn can_apply_op(&self, operation: &Operation) -> bool {
2479 match operation {
2480 Operation::Buffer(_) => {
2481 unreachable!("buffer operations should never be applied at this layer")
2482 }
2483 Operation::UpdateDiagnostics {
2484 diagnostics: diagnostic_set,
2485 ..
2486 } => diagnostic_set.iter().all(|diagnostic| {
2487 self.text.can_resolve(&diagnostic.range.start)
2488 && self.text.can_resolve(&diagnostic.range.end)
2489 }),
2490 Operation::UpdateSelections { selections, .. } => selections
2491 .iter()
2492 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2493 Operation::UpdateCompletionTriggers { .. } => true,
2494 }
2495 }
2496
2497 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2498 match operation {
2499 Operation::Buffer(_) => {
2500 unreachable!("buffer operations should never be applied at this layer")
2501 }
2502 Operation::UpdateDiagnostics {
2503 server_id,
2504 diagnostics: diagnostic_set,
2505 lamport_timestamp,
2506 } => {
2507 let snapshot = self.snapshot();
2508 self.apply_diagnostic_update(
2509 server_id,
2510 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2511 lamport_timestamp,
2512 cx,
2513 );
2514 }
2515 Operation::UpdateSelections {
2516 selections,
2517 lamport_timestamp,
2518 line_mode,
2519 cursor_shape,
2520 } => {
2521 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2522 if set.lamport_timestamp > lamport_timestamp {
2523 return;
2524 }
2525 }
2526
2527 self.remote_selections.insert(
2528 lamport_timestamp.replica_id,
2529 SelectionSet {
2530 selections,
2531 lamport_timestamp,
2532 line_mode,
2533 cursor_shape,
2534 },
2535 );
2536 self.text.lamport_clock.observe(lamport_timestamp);
2537 self.non_text_state_update_count += 1;
2538 }
2539 Operation::UpdateCompletionTriggers {
2540 triggers,
2541 lamport_timestamp,
2542 server_id,
2543 } => {
2544 if triggers.is_empty() {
2545 self.completion_triggers_per_language_server
2546 .remove(&server_id);
2547 self.completion_triggers = self
2548 .completion_triggers_per_language_server
2549 .values()
2550 .flat_map(|triggers| triggers.into_iter().cloned())
2551 .collect();
2552 } else {
2553 self.completion_triggers_per_language_server
2554 .insert(server_id, triggers.iter().cloned().collect());
2555 self.completion_triggers.extend(triggers);
2556 }
2557 self.text.lamport_clock.observe(lamport_timestamp);
2558 }
2559 }
2560 }
2561
2562 fn apply_diagnostic_update(
2563 &mut self,
2564 server_id: LanguageServerId,
2565 diagnostics: DiagnosticSet,
2566 lamport_timestamp: clock::Lamport,
2567 cx: &mut Context<Self>,
2568 ) {
2569 if lamport_timestamp > self.diagnostics_timestamp {
2570 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2571 if diagnostics.is_empty() {
2572 if let Ok(ix) = ix {
2573 self.diagnostics.remove(ix);
2574 }
2575 } else {
2576 match ix {
2577 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2578 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2579 };
2580 }
2581 self.diagnostics_timestamp = lamport_timestamp;
2582 self.non_text_state_update_count += 1;
2583 self.text.lamport_clock.observe(lamport_timestamp);
2584 cx.notify();
2585 cx.emit(BufferEvent::DiagnosticsUpdated);
2586 }
2587 }
2588
2589 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2590 self.was_changed();
2591 cx.emit(BufferEvent::Operation {
2592 operation,
2593 is_local,
2594 });
2595 }
2596
2597 /// Removes the selections for a given peer.
2598 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2599 self.remote_selections.remove(&replica_id);
2600 cx.notify();
2601 }
2602
2603 /// Undoes the most recent transaction.
2604 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2605 let was_dirty = self.is_dirty();
2606 let old_version = self.version.clone();
2607
2608 if let Some((transaction_id, operation)) = self.text.undo() {
2609 self.send_operation(Operation::Buffer(operation), true, cx);
2610 self.did_edit(&old_version, was_dirty, cx);
2611 Some(transaction_id)
2612 } else {
2613 None
2614 }
2615 }
2616
2617 /// Manually undoes a specific transaction in the buffer's undo history.
2618 pub fn undo_transaction(
2619 &mut self,
2620 transaction_id: TransactionId,
2621 cx: &mut Context<Self>,
2622 ) -> bool {
2623 let was_dirty = self.is_dirty();
2624 let old_version = self.version.clone();
2625 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2626 self.send_operation(Operation::Buffer(operation), true, cx);
2627 self.did_edit(&old_version, was_dirty, cx);
2628 true
2629 } else {
2630 false
2631 }
2632 }
2633
2634 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2635 pub fn undo_to_transaction(
2636 &mut self,
2637 transaction_id: TransactionId,
2638 cx: &mut Context<Self>,
2639 ) -> bool {
2640 let was_dirty = self.is_dirty();
2641 let old_version = self.version.clone();
2642
2643 let operations = self.text.undo_to_transaction(transaction_id);
2644 let undone = !operations.is_empty();
2645 for operation in operations {
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 }
2648 if undone {
2649 self.did_edit(&old_version, was_dirty, cx)
2650 }
2651 undone
2652 }
2653
2654 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2655 let was_dirty = self.is_dirty();
2656 let operation = self.text.undo_operations(counts);
2657 let old_version = self.version.clone();
2658 self.send_operation(Operation::Buffer(operation), true, cx);
2659 self.did_edit(&old_version, was_dirty, cx);
2660 }
2661
    /// Redoes the most recent transaction.
2663 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2664 let was_dirty = self.is_dirty();
2665 let old_version = self.version.clone();
2666
2667 if let Some((transaction_id, operation)) = self.text.redo() {
2668 self.send_operation(Operation::Buffer(operation), true, cx);
2669 self.did_edit(&old_version, was_dirty, cx);
2670 Some(transaction_id)
2671 } else {
2672 None
2673 }
2674 }
2675
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2677 pub fn redo_to_transaction(
2678 &mut self,
2679 transaction_id: TransactionId,
2680 cx: &mut Context<Self>,
2681 ) -> bool {
2682 let was_dirty = self.is_dirty();
2683 let old_version = self.version.clone();
2684
2685 let operations = self.text.redo_to_transaction(transaction_id);
2686 let redone = !operations.is_empty();
2687 for operation in operations {
2688 self.send_operation(Operation::Buffer(operation), true, cx);
2689 }
2690 if redone {
2691 self.did_edit(&old_version, was_dirty, cx)
2692 }
2693 redone
2694 }
2695
2696 /// Override current completion triggers with the user-provided completion triggers.
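    ///
    /// A rough sketch (illustrative, not a doctest; `server_id` stands for whichever
    /// [`LanguageServerId`] reported the triggers):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```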
2697 pub fn set_completion_triggers(
2698 &mut self,
2699 server_id: LanguageServerId,
2700 triggers: BTreeSet<String>,
2701 cx: &mut Context<Self>,
2702 ) {
2703 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2704 if triggers.is_empty() {
2705 self.completion_triggers_per_language_server
2706 .remove(&server_id);
2707 self.completion_triggers = self
2708 .completion_triggers_per_language_server
2709 .values()
2710 .flat_map(|triggers| triggers.into_iter().cloned())
2711 .collect();
2712 } else {
2713 self.completion_triggers_per_language_server
2714 .insert(server_id, triggers.clone());
2715 self.completion_triggers.extend(triggers.iter().cloned());
2716 }
2717 self.send_operation(
2718 Operation::UpdateCompletionTriggers {
2719 triggers: triggers.into_iter().collect(),
2720 lamport_timestamp: self.completion_triggers_timestamp,
2721 server_id,
2722 },
2723 true,
2724 cx,
2725 );
2726 cx.notify();
2727 }
2728
2729 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2731 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2732 &self.completion_triggers
2733 }
2734
2735 /// Call this directly after performing edits to prevent the preview tab
2736 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2737 /// to return false until there are additional edits.
2738 pub fn refresh_preview(&mut self) {
2739 self.preview_version = self.version.clone();
2740 }
2741
2742 /// Whether we should preserve the preview status of a tab containing this buffer.
2743 pub fn preserve_preview(&self) -> bool {
2744 !self.has_edits_since(&self.preview_version)
2745 }
2746
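    /// Returns the most recently stored pull-diagnostics result ID, if any.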
2747 pub fn result_id(&self) -> Option<String> {
2748 self.pull_diagnostics_result_id.clone()
2749 }
2750
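    /// Stores a pull-diagnostics result ID for this buffer.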
2751 pub fn set_result_id(&mut self, result_id: Option<String>) {
2752 self.pull_diagnostics_result_id = result_id;
2753 }
2754}
2755
2756#[doc(hidden)]
2757#[cfg(any(test, feature = "test-support"))]
2758impl Buffer {
2759 pub fn edit_via_marked_text(
2760 &mut self,
2761 marked_string: &str,
2762 autoindent_mode: Option<AutoindentMode>,
2763 cx: &mut Context<Self>,
2764 ) {
2765 let edits = self.edits_for_marked_text(marked_string);
2766 self.edit(edits, autoindent_mode, cx);
2767 }
2768
2769 pub fn set_group_interval(&mut self, group_interval: Duration) {
2770 self.text.set_group_interval(group_interval);
2771 }
2772
2773 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2774 where
2775 T: rand::Rng,
2776 {
2777 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2778 let mut last_end = None;
2779 for _ in 0..old_range_count {
2780 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2781 break;
2782 }
2783
2784 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2785 let mut range = self.random_byte_range(new_start, rng);
2786 if rng.gen_bool(0.2) {
2787 mem::swap(&mut range.start, &mut range.end);
2788 }
2789 last_end = Some(range.end);
2790
2791 let new_text_len = rng.gen_range(0..10);
2792 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2793 new_text = new_text.to_uppercase();
2794
2795 edits.push((range, new_text));
2796 }
2797 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2798 self.edit(edits, None, cx);
2799 }
2800
2801 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2802 let was_dirty = self.is_dirty();
2803 let old_version = self.version.clone();
2804
2805 let ops = self.text.randomly_undo_redo(rng);
2806 if !ops.is_empty() {
2807 for op in ops {
2808 self.send_operation(Operation::Buffer(op), true, cx);
2809 self.did_edit(&old_version, was_dirty, cx);
2810 }
2811 }
2812 }
2813}
2814
2815impl EventEmitter<BufferEvent> for Buffer {}
2816
2817impl Deref for Buffer {
2818 type Target = TextBuffer;
2819
2820 fn deref(&self) -> &Self::Target {
2821 &self.text
2822 }
2823}
2824
2825impl BufferSnapshot {
2826 /// Returns [`IndentSize`] for a given line that respects user settings and
2827 /// language preferences.
2828 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2829 indent_size_for_line(self, row)
2830 }
2831
2832 /// Returns [`IndentSize`] for a given position that respects user settings
2833 /// and language preferences.
2834 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2835 let settings = language_settings(
2836 self.language_at(position).map(|l| l.name()),
2837 self.file(),
2838 cx,
2839 );
2840 if settings.hard_tabs {
2841 IndentSize::tab()
2842 } else {
2843 IndentSize::spaces(settings.tab_size.get())
2844 }
2845 }
2846
2847 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2848 /// is passed in as `single_indent_size`.
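    ///
    /// A rough sketch (illustrative, not a doctest): for a Rust buffer containing
    /// `"fn main() {\nlet x = 1;\n}"`, asking about row 1 with a four-space unit
    /// would be expected to suggest one level of indentation for that row.
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(1..2, IndentSize::spaces(4));
    /// assert_eq!(indents.get(&1), Some(&IndentSize::spaces(4)));
    /// ```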
2849 pub fn suggested_indents(
2850 &self,
2851 rows: impl Iterator<Item = u32>,
2852 single_indent_size: IndentSize,
2853 ) -> BTreeMap<u32, IndentSize> {
2854 let mut result = BTreeMap::new();
2855
2856 for row_range in contiguous_ranges(rows, 10) {
2857 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2858 Some(suggestions) => suggestions,
2859 _ => break,
2860 };
2861
2862 for (row, suggestion) in row_range.zip(suggestions) {
2863 let indent_size = if let Some(suggestion) = suggestion {
2864 result
2865 .get(&suggestion.basis_row)
2866 .copied()
2867 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2868 .with_delta(suggestion.delta, single_indent_size)
2869 } else {
2870 self.indent_size_for_line(row)
2871 };
2872
2873 result.insert(row, indent_size);
2874 }
2875 }
2876
2877 result
2878 }
2879
2880 fn suggest_autoindents(
2881 &self,
2882 row_range: Range<u32>,
2883 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2884 let config = &self.language.as_ref()?.config;
2885 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2886 let significant_indentation = config.significant_indentation;
2887
2888 // Find the suggested indentation ranges based on the syntax tree.
2889 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2890 let end = Point::new(row_range.end, 0);
2891 let range = (start..end).to_offset(&self.text);
2892 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2893 Some(&grammar.indents_config.as_ref()?.query)
2894 });
2895 let indent_configs = matches
2896 .grammars()
2897 .iter()
2898 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2899 .collect::<Vec<_>>();
2900
2901 let mut indent_ranges = Vec::<Range<Point>>::new();
2902 let mut outdent_positions = Vec::<Point>::new();
2903 while let Some(mat) = matches.peek() {
2904 let mut start: Option<Point> = None;
2905 let mut end: Option<Point> = None;
2906 let mut outdent: Option<Point> = None;
2907
2908 let config = &indent_configs[mat.grammar_index];
2909 for capture in mat.captures {
2910 if capture.index == config.indent_capture_ix {
2911 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2912 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2913 } else if Some(capture.index) == config.start_capture_ix {
2914 start = Some(Point::from_ts_point(capture.node.end_position()));
2915 } else if Some(capture.index) == config.end_capture_ix {
2916 end = Some(Point::from_ts_point(capture.node.start_position()));
2917 } else if Some(capture.index) == config.outdent_capture_ix {
2918 let point = Point::from_ts_point(capture.node.start_position());
2919 outdent.get_or_insert(point);
2920 outdent_positions.push(point);
2921 }
2922 }
2923
2924 matches.advance();
            // In the case of significant indentation, expand the end to the outdent position.
2926 let end = if significant_indentation {
2927 outdent.or(end)
2928 } else {
2929 end
2930 };
2931 if let Some((start, end)) = start.zip(end) {
2932 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2933 continue;
2934 }
2935 let range = start..end;
2936 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2937 Err(ix) => indent_ranges.insert(ix, range),
2938 Ok(ix) => {
2939 let prev_range = &mut indent_ranges[ix];
2940 prev_range.end = prev_range.end.max(range.end);
2941 }
2942 }
2943 }
2944 }
2945
2946 let mut error_ranges = Vec::<Range<Point>>::new();
2947 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2948 grammar.error_query.as_ref()
2949 });
2950 while let Some(mat) = matches.peek() {
2951 let node = mat.captures[0].node;
2952 let start = Point::from_ts_point(node.start_position());
2953 let end = Point::from_ts_point(node.end_position());
2954 let range = start..end;
2955 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2956 Ok(ix) | Err(ix) => ix,
2957 };
2958 let mut end_ix = ix;
2959 while let Some(existing_range) = error_ranges.get(end_ix) {
2960 if existing_range.end < end {
2961 end_ix += 1;
2962 } else {
2963 break;
2964 }
2965 }
2966 error_ranges.splice(ix..end_ix, [range]);
2967 matches.advance();
2968 }
2969
        // We don't use outdent positions to truncate in the case of significant indentation;
        // rather, we use them to expand the indent ranges (handled above).
2972 if !significant_indentation {
2973 outdent_positions.sort();
2974 for outdent_position in outdent_positions {
2975 // find the innermost indent range containing this outdent_position
2976 // set its end to the outdent position
2977 if let Some(range_to_truncate) = indent_ranges
2978 .iter_mut()
2979 .filter(|indent_range| indent_range.contains(&outdent_position))
2980 .next_back()
2981 {
2982 range_to_truncate.end = outdent_position;
2983 }
2984 }
2985 }
2986
        // Find the suggested indentation increases and decreases based on regexes.
2988 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2989 self.for_each_line(
2990 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2991 ..Point::new(row_range.end, 0),
2992 |row, line| {
2993 if config
2994 .decrease_indent_pattern
2995 .as_ref()
2996 .map_or(false, |regex| regex.is_match(line))
2997 {
2998 indent_change_rows.push((row, Ordering::Less));
2999 }
3000 if config
3001 .increase_indent_pattern
3002 .as_ref()
3003 .map_or(false, |regex| regex.is_match(line))
3004 {
3005 indent_change_rows.push((row + 1, Ordering::Greater));
3006 }
3007 },
3008 );
3009
3010 let mut indent_changes = indent_change_rows.into_iter().peekable();
3011 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3012 prev_non_blank_row.unwrap_or(0)
3013 } else {
3014 row_range.start.saturating_sub(1)
3015 };
3016 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3017 Some(row_range.map(move |row| {
3018 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3019
3020 let mut indent_from_prev_row = false;
3021 let mut outdent_from_prev_row = false;
3022 let mut outdent_to_row = u32::MAX;
3023 let mut from_regex = false;
3024
3025 while let Some((indent_row, delta)) = indent_changes.peek() {
3026 match indent_row.cmp(&row) {
3027 Ordering::Equal => match delta {
3028 Ordering::Less => {
3029 from_regex = true;
3030 outdent_from_prev_row = true
3031 }
3032 Ordering::Greater => {
3033 indent_from_prev_row = true;
3034 from_regex = true
3035 }
3036 _ => {}
3037 },
3038
3039 Ordering::Greater => break,
3040 Ordering::Less => {}
3041 }
3042
3043 indent_changes.next();
3044 }
3045
3046 for range in &indent_ranges {
3047 if range.start.row >= row {
3048 break;
3049 }
3050 if range.start.row == prev_row && range.end > row_start {
3051 indent_from_prev_row = true;
3052 }
3053 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3054 {
3055 indent_from_prev_row = true;
3056 }
3057 if !significant_indentation || !self.is_line_blank(row) {
3058 if range.end > prev_row_start && range.end <= row_start {
3059 outdent_to_row = outdent_to_row.min(range.start.row);
3060 }
3061 }
3062 }
3063
3064 let within_error = error_ranges
3065 .iter()
3066 .any(|e| e.start.row < row && e.end > row_start);
3067
3068 let suggestion = if outdent_to_row == prev_row
3069 || (outdent_from_prev_row && indent_from_prev_row)
3070 {
3071 Some(IndentSuggestion {
3072 basis_row: prev_row,
3073 delta: Ordering::Equal,
3074 within_error: within_error && !from_regex,
3075 })
3076 } else if indent_from_prev_row {
3077 Some(IndentSuggestion {
3078 basis_row: prev_row,
3079 delta: Ordering::Greater,
3080 within_error: within_error && !from_regex,
3081 })
3082 } else if outdent_to_row < prev_row {
3083 Some(IndentSuggestion {
3084 basis_row: outdent_to_row,
3085 delta: Ordering::Equal,
3086 within_error: within_error && !from_regex,
3087 })
3088 } else if outdent_from_prev_row {
3089 Some(IndentSuggestion {
3090 basis_row: prev_row,
3091 delta: Ordering::Less,
3092 within_error: within_error && !from_regex,
3093 })
3094 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3095 {
3096 Some(IndentSuggestion {
3097 basis_row: prev_row,
3098 delta: Ordering::Equal,
3099 within_error: within_error && !from_regex,
3100 })
3101 } else {
3102 None
3103 };
3104
3105 prev_row = row;
3106 prev_row_start = row_start;
3107 suggestion
3108 }))
3109 }
3110
3111 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3112 while row > 0 {
3113 row -= 1;
3114 if !self.is_line_blank(row) {
3115 return Some(row);
3116 }
3117 }
3118 None
3119 }
3120
3121 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3122 let captures = self.syntax.captures(range, &self.text, |grammar| {
3123 grammar.highlights_query.as_ref()
3124 });
3125 let highlight_maps = captures
3126 .grammars()
3127 .iter()
3128 .map(|grammar| grammar.highlight_map())
3129 .collect();
3130 (captures, highlight_maps)
3131 }
3132
3133 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3134 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3135 /// returned in chunks where each chunk has a single syntax highlighting style and
3136 /// diagnostic status.
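    ///
    /// A rough sketch of reassembling the text while chunk metadata rides alongside
    /// (illustrative, not a doctest; assumes the `text` field on each chunk as used
    /// elsewhere in this crate):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// assert_eq!(text, snapshot.text());
    /// ```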
3137 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3138 let range = range.start.to_offset(self)..range.end.to_offset(self);
3139
3140 let mut syntax = None;
3141 if language_aware {
3142 syntax = Some(self.get_highlights(range.clone()));
3143 }
3144 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3145 let diagnostics = language_aware;
3146 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3147 }
3148
3149 pub fn highlighted_text_for_range<T: ToOffset>(
3150 &self,
3151 range: Range<T>,
3152 override_style: Option<HighlightStyle>,
3153 syntax_theme: &SyntaxTheme,
3154 ) -> HighlightedText {
3155 HighlightedText::from_buffer_range(
3156 range,
3157 &self.text,
3158 &self.syntax,
3159 override_style,
3160 syntax_theme,
3161 )
3162 }
3163
3164 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
3166 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3167 let mut line = String::new();
3168 let mut row = range.start.row;
3169 for chunk in self
3170 .as_rope()
3171 .chunks_in_range(range.to_offset(self))
3172 .chain(["\n"])
3173 {
3174 for (newline_ix, text) in chunk.split('\n').enumerate() {
3175 if newline_ix > 0 {
3176 callback(row, &line);
3177 row += 1;
3178 line.clear();
3179 }
3180 line.push_str(text);
3181 }
3182 }
3183 }
3184
3185 /// Iterates over every [`SyntaxLayer`] in the buffer.
3186 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3187 self.syntax
3188 .layers_for_range(0..self.len(), &self.text, true)
3189 }
3190
3191 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3192 let offset = position.to_offset(self);
3193 self.syntax
3194 .layers_for_range(offset..offset, &self.text, false)
3195 .filter(|l| l.node().end_byte() > offset)
3196 .last()
3197 }
3198
3199 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3200 &self,
3201 range: Range<D>,
3202 ) -> Option<SyntaxLayer> {
3203 let range = range.to_offset(self);
3204 return self
3205 .syntax
3206 .layers_for_range(range, &self.text, false)
3207 .max_by(|a, b| {
3208 if a.depth != b.depth {
3209 a.depth.cmp(&b.depth)
3210 } else if a.offset.0 != b.offset.0 {
3211 a.offset.0.cmp(&b.offset.0)
3212 } else {
3213 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3214 }
3215 });
3216 }
3217
3218 /// Returns the main [`Language`].
3219 pub fn language(&self) -> Option<&Arc<Language>> {
3220 self.language.as_ref()
3221 }
3222
3223 /// Returns the [`Language`] at the given location.
3224 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3225 self.syntax_layer_at(position)
3226 .map(|info| info.language)
3227 .or(self.language.as_ref())
3228 }
3229
3230 /// Returns the settings for the language at the given location.
3231 pub fn settings_at<'a, D: ToOffset>(
3232 &'a self,
3233 position: D,
3234 cx: &'a App,
3235 ) -> Cow<'a, LanguageSettings> {
3236 language_settings(
3237 self.language_at(position).map(|l| l.name()),
3238 self.file.as_ref(),
3239 cx,
3240 )
3241 }
3242
3243 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3244 CharClassifier::new(self.language_scope_at(point))
3245 }
3246
3247 /// Returns the [`LanguageScope`] at the given location.
3248 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3249 let offset = position.to_offset(self);
3250 let mut scope = None;
3251 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3252
3253 // Use the layer that has the smallest node intersecting the given point.
3254 for layer in self
3255 .syntax
3256 .layers_for_range(offset..offset, &self.text, false)
3257 {
3258 let mut cursor = layer.node().walk();
3259
3260 let mut range = None;
3261 loop {
3262 let child_range = cursor.node().byte_range();
3263 if !child_range.contains(&offset) {
3264 break;
3265 }
3266
3267 range = Some(child_range);
3268 if cursor.goto_first_child_for_byte(offset).is_none() {
3269 break;
3270 }
3271 }
3272
3273 if let Some(range) = range {
3274 if smallest_range_and_depth.as_ref().map_or(
3275 true,
3276 |(smallest_range, smallest_range_depth)| {
3277 if layer.depth > *smallest_range_depth {
3278 true
3279 } else if layer.depth == *smallest_range_depth {
3280 range.len() < smallest_range.len()
3281 } else {
3282 false
3283 }
3284 },
3285 ) {
3286 smallest_range_and_depth = Some((range, layer.depth));
3287 scope = Some(LanguageScope {
3288 language: layer.language.clone(),
3289 override_id: layer.override_id(offset, &self.text),
3290 });
3291 }
3292 }
3293 }
3294
3295 scope.or_else(|| {
3296 self.language.clone().map(|language| LanguageScope {
3297 language,
3298 override_id: None,
3299 })
3300 })
3301 }
3302
3303 /// Returns a tuple of the range and character kind of the word
3304 /// surrounding the given position.
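    ///
    /// A rough sketch (illustrative, not a doctest): in a buffer containing
    /// `"let foo_bar = 1;"`, an offset inside the identifier resolves to the
    /// whole word.
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(6);
    /// assert_eq!(&snapshot.text()[range], "foo_bar");
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```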
3305 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3306 let mut start = start.to_offset(self);
3307 let mut end = start;
3308 let mut next_chars = self.chars_at(start).take(128).peekable();
3309 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3310
3311 let classifier = self.char_classifier_at(start);
3312 let word_kind = cmp::max(
3313 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3314 next_chars.peek().copied().map(|c| classifier.kind(c)),
3315 );
3316
3317 for ch in prev_chars {
3318 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3319 start -= ch.len_utf8();
3320 } else {
3321 break;
3322 }
3323 }
3324
3325 for ch in next_chars {
3326 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3327 end += ch.len_utf8();
3328 } else {
3329 break;
3330 }
3331 }
3332
3333 (start..end, word_kind)
3334 }
3335
3336 /// Returns the closest syntax node enclosing the given range.
3337 pub fn syntax_ancestor<'a, T: ToOffset>(
3338 &'a self,
3339 range: Range<T>,
3340 ) -> Option<tree_sitter::Node<'a>> {
3341 let range = range.start.to_offset(self)..range.end.to_offset(self);
3342 let mut result: Option<tree_sitter::Node<'a>> = None;
3343 'outer: for layer in self
3344 .syntax
3345 .layers_for_range(range.clone(), &self.text, true)
3346 {
3347 let mut cursor = layer.node().walk();
3348
3349 // Descend to the first leaf that touches the start of the range.
3350 //
3351 // If the range is non-empty and the current node ends exactly at the start,
3352 // move to the next sibling to find a node that extends beyond the start.
3353 //
3354 // If the range is empty and the current node starts after the range position,
3355 // move to the previous sibling to find the node that contains the position.
3356 while cursor.goto_first_child_for_byte(range.start).is_some() {
3357 if !range.is_empty() && cursor.node().end_byte() == range.start {
3358 cursor.goto_next_sibling();
3359 }
3360 if range.is_empty() && cursor.node().start_byte() > range.start {
3361 cursor.goto_previous_sibling();
3362 }
3363 }
3364
3365 // Ascend to the smallest ancestor that strictly contains the range.
3366 loop {
3367 let node_range = cursor.node().byte_range();
3368 if node_range.start <= range.start
3369 && node_range.end >= range.end
3370 && node_range.len() > range.len()
3371 {
3372 break;
3373 }
3374 if !cursor.goto_parent() {
3375 continue 'outer;
3376 }
3377 }
3378
3379 let left_node = cursor.node();
3380 let mut layer_result = left_node;
3381
3382 // For an empty range, try to find another node immediately to the right of the range.
3383 if left_node.end_byte() == range.start {
3384 let mut right_node = None;
3385 while !cursor.goto_next_sibling() {
3386 if !cursor.goto_parent() {
3387 break;
3388 }
3389 }
3390
3391 while cursor.node().start_byte() == range.start {
3392 right_node = Some(cursor.node());
3393 if !cursor.goto_first_child() {
3394 break;
3395 }
3396 }
3397
3398 // If there is a candidate node on both sides of the (empty) range, then
3399 // decide between the two by favoring a named node over an anonymous token.
3400 // If both nodes are the same in that regard, favor the right one.
3401 if let Some(right_node) = right_node {
3402 if right_node.is_named() || !left_node.is_named() {
3403 layer_result = right_node;
3404 }
3405 }
3406 }
3407
3408 if let Some(previous_result) = &result {
3409 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3410 continue;
3411 }
3412 }
3413 result = Some(layer_result);
3414 }
3415
3416 result
3417 }
3418
    /// Returns the root syntax node within the given row.
3420 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3421 let start_offset = position.to_offset(self);
3422
3423 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3424
3425 let layer = self
3426 .syntax
3427 .layers_for_range(start_offset..start_offset, &self.text, true)
3428 .next()?;
3429
3430 let mut cursor = layer.node().walk();
3431
3432 // Descend to the first leaf that touches the start of the range.
3433 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3434 if cursor.node().end_byte() == start_offset {
3435 cursor.goto_next_sibling();
3436 }
3437 }
3438
3439 // Ascend to the root node within the same row.
3440 while cursor.goto_parent() {
3441 if cursor.node().start_position().row != row {
3442 break;
3443 }
3444 }
3445
3446 return Some(cursor.node());
3447 }
3448
3449 /// Returns the outline for the buffer.
3450 ///
3451 /// This method allows passing an optional [`SyntaxTheme`] to
3452 /// syntax-highlight the returned symbols.
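    ///
    /// A rough sketch of walking the result (illustrative, not a doctest; assumes the
    /// `items`, `depth`, and `text` fields as used elsewhere in this crate):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         println!("{}{}", "  ".repeat(item.depth), item.text);
    ///     }
    /// }
    /// ```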
3453 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3454 self.outline_items_containing(0..self.len(), true, theme)
3455 .map(Outline::new)
3456 }
3457
3458 /// Returns all the symbols that contain the given position.
3459 ///
3460 /// This method allows passing an optional [`SyntaxTheme`] to
3461 /// syntax-highlight the returned symbols.
3462 pub fn symbols_containing<T: ToOffset>(
3463 &self,
3464 position: T,
3465 theme: Option<&SyntaxTheme>,
3466 ) -> Option<Vec<OutlineItem<Anchor>>> {
3467 let position = position.to_offset(self);
3468 let mut items = self.outline_items_containing(
3469 position.saturating_sub(1)..self.len().min(position + 1),
3470 false,
3471 theme,
3472 )?;
3473 let mut prev_depth = None;
3474 items.retain(|item| {
3475 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3476 prev_depth = Some(item.depth);
3477 result
3478 });
3479 Some(items)
3480 }
3481
3482 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3483 let range = range.to_offset(self);
3484 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3485 grammar.outline_config.as_ref().map(|c| &c.query)
3486 });
3487 let configs = matches
3488 .grammars()
3489 .iter()
3490 .map(|g| g.outline_config.as_ref().unwrap())
3491 .collect::<Vec<_>>();
3492
3493 while let Some(mat) = matches.peek() {
3494 let config = &configs[mat.grammar_index];
3495 let containing_item_node = maybe!({
3496 let item_node = mat.captures.iter().find_map(|cap| {
3497 if cap.index == config.item_capture_ix {
3498 Some(cap.node)
3499 } else {
3500 None
3501 }
3502 })?;
3503
3504 let item_byte_range = item_node.byte_range();
3505 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3506 None
3507 } else {
3508 Some(item_node)
3509 }
3510 });
3511
3512 if let Some(item_node) = containing_item_node {
3513 return Some(
3514 Point::from_ts_point(item_node.start_position())
3515 ..Point::from_ts_point(item_node.end_position()),
3516 );
3517 }
3518
3519 matches.advance();
3520 }
3521 None
3522 }
3523
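    /// Returns the outline items intersecting the given range, with their depths computed
    /// from containment and their positions converted to anchors.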
3524 pub fn outline_items_containing<T: ToOffset>(
3525 &self,
3526 range: Range<T>,
3527 include_extra_context: bool,
3528 theme: Option<&SyntaxTheme>,
3529 ) -> Option<Vec<OutlineItem<Anchor>>> {
3530 let range = range.to_offset(self);
3531 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3532 grammar.outline_config.as_ref().map(|c| &c.query)
3533 });
3534 let configs = matches
3535 .grammars()
3536 .iter()
3537 .map(|g| g.outline_config.as_ref().unwrap())
3538 .collect::<Vec<_>>();
3539
3540 let mut items = Vec::new();
3541 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3542 while let Some(mat) = matches.peek() {
3543 let config = &configs[mat.grammar_index];
3544 if let Some(item) =
3545 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3546 {
3547 items.push(item);
3548 } else if let Some(capture) = mat
3549 .captures
3550 .iter()
3551 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3552 {
3553 let capture_range = capture.node.start_position()..capture.node.end_position();
3554 let mut capture_row_range =
3555 capture_range.start.row as u32..capture_range.end.row as u32;
3556 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3557 {
3558 capture_row_range.end -= 1;
3559 }
3560 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3561 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3562 last_row_range.end = capture_row_range.end;
3563 } else {
3564 annotation_row_ranges.push(capture_row_range);
3565 }
3566 } else {
3567 annotation_row_ranges.push(capture_row_range);
3568 }
3569 }
3570 matches.advance();
3571 }
3572
3573 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3574
3575 // Assign depths based on containment relationships and convert to anchors.
3576 let mut item_ends_stack = Vec::<Point>::new();
3577 let mut anchor_items = Vec::new();
3578 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3579 for item in items {
3580 while let Some(last_end) = item_ends_stack.last().copied() {
3581 if last_end < item.range.end {
3582 item_ends_stack.pop();
3583 } else {
3584 break;
3585 }
3586 }
3587
3588 let mut annotation_row_range = None;
3589 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3590 let row_preceding_item = item.range.start.row.saturating_sub(1);
3591 if next_annotation_row_range.end < row_preceding_item {
3592 annotation_row_ranges.next();
3593 } else {
3594 if next_annotation_row_range.end == row_preceding_item {
3595 annotation_row_range = Some(next_annotation_row_range.clone());
3596 annotation_row_ranges.next();
3597 }
3598 break;
3599 }
3600 }
3601
3602 anchor_items.push(OutlineItem {
3603 depth: item_ends_stack.len(),
3604 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3605 text: item.text,
3606 highlight_ranges: item.highlight_ranges,
3607 name_ranges: item.name_ranges,
3608 body_range: item.body_range.map(|body_range| {
3609 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3610 }),
3611 annotation_range: annotation_row_range.map(|annotation_range| {
3612 self.anchor_after(Point::new(annotation_range.start, 0))
3613 ..self.anchor_before(Point::new(
3614 annotation_range.end,
3615 self.line_len(annotation_range.end),
3616 ))
3617 }),
3618 });
3619 item_ends_stack.push(item.range.end);
3620 }
3621
3622 Some(anchor_items)
3623 }
3624
3625 fn next_outline_item(
3626 &self,
3627 config: &OutlineConfig,
3628 mat: &SyntaxMapMatch,
3629 range: &Range<usize>,
3630 include_extra_context: bool,
3631 theme: Option<&SyntaxTheme>,
3632 ) -> Option<OutlineItem<Point>> {
3633 let item_node = mat.captures.iter().find_map(|cap| {
3634 if cap.index == config.item_capture_ix {
3635 Some(cap.node)
3636 } else {
3637 None
3638 }
3639 })?;
3640
3641 let item_byte_range = item_node.byte_range();
3642 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3643 return None;
3644 }
3645 let item_point_range = Point::from_ts_point(item_node.start_position())
3646 ..Point::from_ts_point(item_node.end_position());
3647
3648 let mut open_point = None;
3649 let mut close_point = None;
3650 let mut buffer_ranges = Vec::new();
3651 for capture in mat.captures {
3652 let node_is_name;
3653 if capture.index == config.name_capture_ix {
3654 node_is_name = true;
3655 } else if Some(capture.index) == config.context_capture_ix
3656 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3657 {
3658 node_is_name = false;
3659 } else {
3660 if Some(capture.index) == config.open_capture_ix {
3661 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3662 } else if Some(capture.index) == config.close_capture_ix {
3663 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3664 }
3665
3666 continue;
3667 }
3668
3669 let mut range = capture.node.start_byte()..capture.node.end_byte();
3670 let start = capture.node.start_position();
3671 if capture.node.end_position().row > start.row {
3672 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3673 }
3674
3675 if !range.is_empty() {
3676 buffer_ranges.push((range, node_is_name));
3677 }
3678 }
3679 if buffer_ranges.is_empty() {
3680 return None;
3681 }
3682 let mut text = String::new();
3683 let mut highlight_ranges = Vec::new();
3684 let mut name_ranges = Vec::new();
3685 let mut chunks = self.chunks(
3686 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3687 true,
3688 );
3689 let mut last_buffer_range_end = 0;
3690
3691 for (buffer_range, is_name) in buffer_ranges {
3692 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3693 if space_added {
3694 text.push(' ');
3695 }
3696 let before_append_len = text.len();
3697 let mut offset = buffer_range.start;
3698 chunks.seek(buffer_range.clone());
3699 for mut chunk in chunks.by_ref() {
3700 if chunk.text.len() > buffer_range.end - offset {
3701 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3702 offset = buffer_range.end;
3703 } else {
3704 offset += chunk.text.len();
3705 }
3706 let style = chunk
3707 .syntax_highlight_id
3708 .zip(theme)
3709 .and_then(|(highlight, theme)| highlight.style(theme));
3710 if let Some(style) = style {
3711 let start = text.len();
3712 let end = start + chunk.text.len();
3713 highlight_ranges.push((start..end, style));
3714 }
3715 text.push_str(chunk.text);
3716 if offset >= buffer_range.end {
3717 break;
3718 }
3719 }
3720 if is_name {
3721 let after_append_len = text.len();
3722 let start = if space_added && !name_ranges.is_empty() {
3723 before_append_len - 1
3724 } else {
3725 before_append_len
3726 };
3727 name_ranges.push(start..after_append_len);
3728 }
3729 last_buffer_range_end = buffer_range.end;
3730 }
3731
3732 Some(OutlineItem {
            depth: 0, // The real depth is assigned later, in `outline_items_containing`.
3734 range: item_point_range,
3735 text,
3736 highlight_ranges,
3737 name_ranges,
3738 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3739 annotation_range: None,
3740 })
3741 }
3742
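    /// Returns the ranges of function bodies within the given range, for use when folding them.
    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` whose language
    /// defines text objects):
    ///
    /// ```ignore
    /// let fold_ranges: Vec<Range<usize>> = snapshot
    ///     .function_body_fold_ranges(0..snapshot.len())
    ///     .collect();
    /// ```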
3743 pub fn function_body_fold_ranges<T: ToOffset>(
3744 &self,
3745 within: Range<T>,
3746 ) -> impl Iterator<Item = Range<usize>> + '_ {
3747 self.text_object_ranges(within, TreeSitterOptions::default())
3748 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3749 }
3750
3751 /// For each grammar in the language, runs the provided
3752 /// [`tree_sitter::Query`] against the given range.
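    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot`; it mirrors how
    /// the outline query is consumed elsewhere in this module):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```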
3753 pub fn matches(
3754 &self,
3755 range: Range<usize>,
3756 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3757 ) -> SyntaxMapMatches {
3758 self.syntax.matches(range, self, query)
3759 }
3760
3761 pub fn all_bracket_ranges(
3762 &self,
3763 range: Range<usize>,
3764 ) -> impl Iterator<Item = BracketMatch> + '_ {
3765 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3766 grammar.brackets_config.as_ref().map(|c| &c.query)
3767 });
3768 let configs = matches
3769 .grammars()
3770 .iter()
3771 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3772 .collect::<Vec<_>>();
3773
3774 iter::from_fn(move || {
3775 while let Some(mat) = matches.peek() {
3776 let mut open = None;
3777 let mut close = None;
3778 let config = &configs[mat.grammar_index];
3779 let pattern = &config.patterns[mat.pattern_index];
3780 for capture in mat.captures {
3781 if capture.index == config.open_capture_ix {
3782 open = Some(capture.node.byte_range());
3783 } else if capture.index == config.close_capture_ix {
3784 close = Some(capture.node.byte_range());
3785 }
3786 }
3787
3788 matches.advance();
3789
3790 let Some((open_range, close_range)) = open.zip(close) else {
3791 continue;
3792 };
3793
3794 let bracket_range = open_range.start..=close_range.end;
3795 if !bracket_range.overlaps(&range) {
3796 continue;
3797 }
3798
3799 return Some(BracketMatch {
3800 open_range,
3801 close_range,
3802 newline_only: pattern.newline_only,
3803 });
3804 }
3805 None
3806 })
3807 }
3808
    /// Returns bracket range pairs overlapping or adjacent to `range`.
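    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` whose language
    /// has a brackets query):
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(0..snapshot.len()) {
    ///     // `open_range` and `close_range` are byte ranges of the opening and closing brackets.
    ///     let _ = (&pair.open_range, &pair.close_range);
    /// }
    /// ```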
3810 pub fn bracket_ranges<T: ToOffset>(
3811 &self,
3812 range: Range<T>,
3813 ) -> impl Iterator<Item = BracketMatch> + '_ {
3814 // Find bracket pairs that *inclusively* contain the given range.
3815 let range = range.start.to_offset(self).saturating_sub(1)
3816 ..self.len().min(range.end.to_offset(self) + 1);
3817 self.all_bracket_ranges(range)
3818 .filter(|pair| !pair.newline_only)
3819 }
3820
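    /// Returns the text object ranges (as defined by the language's text objects query)
    /// that overlap the given range.
    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// for (range, object) in
    ///     snapshot.text_object_ranges(0..snapshot.len(), TreeSitterOptions::default())
    /// {
    ///     if object == TextObject::InsideFunction {
    ///         // `range` covers the body of a function.
    ///     }
    /// }
    /// ```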
3821 pub fn text_object_ranges<T: ToOffset>(
3822 &self,
3823 range: Range<T>,
3824 options: TreeSitterOptions,
3825 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3826 let range = range.start.to_offset(self).saturating_sub(1)
3827 ..self.len().min(range.end.to_offset(self) + 1);
3828
3829 let mut matches =
3830 self.syntax
3831 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3832 grammar.text_object_config.as_ref().map(|c| &c.query)
3833 });
3834
3835 let configs = matches
3836 .grammars()
3837 .iter()
3838 .map(|grammar| grammar.text_object_config.as_ref())
3839 .collect::<Vec<_>>();
3840
3841 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3842
3843 iter::from_fn(move || {
3844 loop {
3845 while let Some(capture) = captures.pop() {
3846 if capture.0.overlaps(&range) {
3847 return Some(capture);
3848 }
3849 }
3850
3851 let mat = matches.peek()?;
3852
3853 let Some(config) = configs[mat.grammar_index].as_ref() else {
3854 matches.advance();
3855 continue;
3856 };
3857
3858 for capture in mat.captures {
3859 let Some(ix) = config
3860 .text_objects_by_capture_ix
3861 .binary_search_by_key(&capture.index, |e| e.0)
3862 .ok()
3863 else {
3864 continue;
3865 };
3866 let text_object = config.text_objects_by_capture_ix[ix].1;
3867 let byte_range = capture.node.byte_range();
3868
3869 let mut found = false;
3870 for (range, existing) in captures.iter_mut() {
3871 if existing == &text_object {
3872 range.start = range.start.min(byte_range.start);
3873 range.end = range.end.max(byte_range.end);
3874 found = true;
3875 break;
3876 }
3877 }
3878
3879 if !found {
3880 captures.push((byte_range, text_object));
3881 }
3882 }
3883
3884 matches.advance();
3885 }
3886 })
3887 }
3888
    /// Returns enclosing bracket ranges containing the given range.
3890 pub fn enclosing_bracket_ranges<T: ToOffset>(
3891 &self,
3892 range: Range<T>,
3893 ) -> impl Iterator<Item = BracketMatch> + '_ {
3894 let range = range.start.to_offset(self)..range.end.to_offset(self);
3895
3896 self.bracket_ranges(range.clone()).filter(move |pair| {
3897 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3898 })
3899 }
3900
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket pairs are considered.
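    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` and `selection`
    /// is a byte range; the filter shown skips pairs whose brackets are directly adjacent):
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| open.end != close.start;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost surrounding brackets.
    /// }
    /// ```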
3904 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3905 &self,
3906 range: Range<T>,
3907 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3908 ) -> Option<(Range<usize>, Range<usize>)> {
3909 let range = range.start.to_offset(self)..range.end.to_offset(self);
3910
3911 // Get the ranges of the innermost pair of brackets.
3912 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3913
3914 for pair in self.enclosing_bracket_ranges(range.clone()) {
3915 if let Some(range_filter) = range_filter {
3916 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3917 continue;
3918 }
3919 }
3920
3921 let len = pair.close_range.end - pair.open_range.start;
3922
3923 if let Some((existing_open, existing_close)) = &result {
3924 let existing_len = existing_close.end - existing_open.start;
3925 if len > existing_len {
3926 continue;
3927 }
3928 }
3929
3930 result = Some((pair.open_range, pair.close_range));
3931 }
3932
3933 result
3934 }
3935
3936 /// Returns anchor ranges for any matches of the redaction query.
3937 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3938 /// will be run on the relevant section of the buffer.
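    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` for a language
    /// that defines a redactions query):
    ///
    /// ```ignore
    /// let redacted: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```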
3939 pub fn redacted_ranges<T: ToOffset>(
3940 &self,
3941 range: Range<T>,
3942 ) -> impl Iterator<Item = Range<usize>> + '_ {
3943 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3944 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3945 grammar
3946 .redactions_config
3947 .as_ref()
3948 .map(|config| &config.query)
3949 });
3950
3951 let configs = syntax_matches
3952 .grammars()
3953 .iter()
3954 .map(|grammar| grammar.redactions_config.as_ref())
3955 .collect::<Vec<_>>();
3956
3957 iter::from_fn(move || {
3958 let redacted_range = syntax_matches
3959 .peek()
3960 .and_then(|mat| {
3961 configs[mat.grammar_index].and_then(|config| {
3962 mat.captures
3963 .iter()
3964 .find(|capture| capture.index == config.redaction_capture_ix)
3965 })
3966 })
3967 .map(|mat| mat.node.byte_range());
3968 syntax_matches.advance();
3969 redacted_range
3970 })
3971 }
3972
3973 pub fn injections_intersecting_range<T: ToOffset>(
3974 &self,
3975 range: Range<T>,
3976 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3977 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3978
3979 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3980 grammar
3981 .injection_config
3982 .as_ref()
3983 .map(|config| &config.query)
3984 });
3985
3986 let configs = syntax_matches
3987 .grammars()
3988 .iter()
3989 .map(|grammar| grammar.injection_config.as_ref())
3990 .collect::<Vec<_>>();
3991
3992 iter::from_fn(move || {
3993 let ranges = syntax_matches.peek().and_then(|mat| {
3994 let config = &configs[mat.grammar_index]?;
3995 let content_capture_range = mat.captures.iter().find_map(|capture| {
3996 if capture.index == config.content_capture_ix {
3997 Some(capture.node.byte_range())
3998 } else {
3999 None
4000 }
4001 })?;
4002 let language = self.language_at(content_capture_range.start)?;
4003 Some((content_capture_range, language))
4004 });
4005 syntax_matches.advance();
4006 ranges
4007 })
4008 }
4009
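    /// Returns the runnables (e.g. tests) detected by the language's runnables query within
    /// the given range.
    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` whose language
    /// has a runnables query):
    ///
    /// ```ignore
    /// for runnable in snapshot.runnable_ranges(0..snapshot.len()) {
    ///     // `run_range` is where a run indicator can be shown; `full_range` spans the
    ///     // whole matched construct.
    ///     let _ = (&runnable.run_range, &runnable.full_range);
    /// }
    /// ```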
4010 pub fn runnable_ranges(
4011 &self,
4012 offset_range: Range<usize>,
4013 ) -> impl Iterator<Item = RunnableRange> + '_ {
4014 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4015 grammar.runnable_config.as_ref().map(|config| &config.query)
4016 });
4017
4018 let test_configs = syntax_matches
4019 .grammars()
4020 .iter()
4021 .map(|grammar| grammar.runnable_config.as_ref())
4022 .collect::<Vec<_>>();
4023
4024 iter::from_fn(move || {
4025 loop {
4026 let mat = syntax_matches.peek()?;
4027
4028 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4029 let mut run_range = None;
4030 let full_range = mat.captures.iter().fold(
4031 Range {
4032 start: usize::MAX,
4033 end: 0,
4034 },
4035 |mut acc, next| {
4036 let byte_range = next.node.byte_range();
4037 if acc.start > byte_range.start {
4038 acc.start = byte_range.start;
4039 }
4040 if acc.end < byte_range.end {
4041 acc.end = byte_range.end;
4042 }
4043 acc
4044 },
4045 );
4046 if full_range.start > full_range.end {
4047 // We did not find a full spanning range of this match.
4048 return None;
4049 }
4050 let extra_captures: SmallVec<[_; 1]> =
4051 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4052 test_configs
4053 .extra_captures
4054 .get(capture.index as usize)
4055 .cloned()
4056 .and_then(|tag_name| match tag_name {
4057 RunnableCapture::Named(name) => {
4058 Some((capture.node.byte_range(), name))
4059 }
4060 RunnableCapture::Run => {
4061 let _ = run_range.insert(capture.node.byte_range());
4062 None
4063 }
4064 })
4065 }));
4066 let run_range = run_range?;
4067 let tags = test_configs
4068 .query
4069 .property_settings(mat.pattern_index)
4070 .iter()
4071 .filter_map(|property| {
4072 if *property.key == *"tag" {
4073 property
4074 .value
4075 .as_ref()
4076 .map(|value| RunnableTag(value.to_string().into()))
4077 } else {
4078 None
4079 }
4080 })
4081 .collect();
4082 let extra_captures = extra_captures
4083 .into_iter()
4084 .map(|(range, name)| {
4085 (
4086 name.to_string(),
4087 self.text_for_range(range.clone()).collect::<String>(),
4088 )
4089 })
4090 .collect();
4091 // All tags should have the same range.
4092 Some(RunnableRange {
4093 run_range,
4094 full_range,
4095 runnable: Runnable {
4096 tags,
4097 language: mat.language,
4098 buffer: self.remote_id(),
4099 },
4100 extra_captures,
4101 buffer_id: self.remote_id(),
4102 })
4103 });
4104
4105 syntax_matches.advance();
4106 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()` returns `None`. We just don't want
                    // to end this iterator early when a match lacked a run marker, so in that case
                    // we loop around and try the next match.
4109 return test_range;
4110 }
4111 }
4112 })
4113 }
4114
    /// Returns the selections of peers intersecting the given range, optionally including the
    /// local replica's selections.
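    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot` and that
    /// `Anchor::MIN..Anchor::MAX` can be used to denote the whole buffer):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `Selection<Anchor>` belonging to `replica_id`.
    ///     }
    /// }
    /// ```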
4116 #[allow(clippy::type_complexity)]
4117 pub fn selections_in_range(
4118 &self,
4119 range: Range<Anchor>,
4120 include_local: bool,
4121 ) -> impl Iterator<
4122 Item = (
4123 ReplicaId,
4124 bool,
4125 CursorShape,
4126 impl Iterator<Item = &Selection<Anchor>> + '_,
4127 ),
4128 > + '_ {
4129 self.remote_selections
4130 .iter()
4131 .filter(move |(replica_id, set)| {
4132 (include_local || **replica_id != self.text.replica_id())
4133 && !set.selections.is_empty()
4134 })
4135 .map(move |(replica_id, set)| {
4136 let start_ix = match set.selections.binary_search_by(|probe| {
4137 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4138 }) {
4139 Ok(ix) | Err(ix) => ix,
4140 };
4141 let end_ix = match set.selections.binary_search_by(|probe| {
4142 probe.start.cmp(&range.end, self).then(Ordering::Less)
4143 }) {
4144 Ok(ix) | Err(ix) => ix,
4145 };
4146
4147 (
4148 *replica_id,
4149 set.line_mode,
4150 set.cursor_shape,
4151 set.selections[start_ix..end_ix].iter(),
4152 )
4153 })
4154 }
4155
    /// Returns whether the buffer contains any diagnostics.
4157 pub fn has_diagnostics(&self) -> bool {
4158 !self.diagnostics.is_empty()
4159 }
4160
4161 /// Returns all the diagnostics intersecting the given range.
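    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot`; the `usize`
    /// type parameter resolves the returned ranges to byte offsets):
    ///
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, usize>(0..snapshot.len(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```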
4162 pub fn diagnostics_in_range<'a, T, O>(
4163 &'a self,
4164 search_range: Range<T>,
4165 reversed: bool,
4166 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4167 where
4168 T: 'a + Clone + ToOffset,
4169 O: 'a + FromAnchor,
4170 {
4171 let mut iterators: Vec<_> = self
4172 .diagnostics
4173 .iter()
4174 .map(|(_, collection)| {
4175 collection
4176 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4177 .peekable()
4178 })
4179 .collect();
4180
4181 std::iter::from_fn(move || {
4182 let (next_ix, _) = iterators
4183 .iter_mut()
4184 .enumerate()
4185 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4186 .min_by(|(_, a), (_, b)| {
4187 let cmp = a
4188 .range
4189 .start
4190 .cmp(&b.range.start, self)
4191 // when range is equal, sort by diagnostic severity
4192 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4193 // and stabilize order with group_id
4194 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4195 if reversed { cmp.reverse() } else { cmp }
4196 })?;
4197 iterators[next_ix]
4198 .next()
4199 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4200 diagnostic,
4201 range: FromAnchor::from_anchor(&range.start, self)
4202 ..FromAnchor::from_anchor(&range.end, self),
4203 })
4204 })
4205 }
4206
4207 /// Returns all the diagnostic groups associated with the given
4208 /// language server ID. If no language server ID is provided,
4209 /// all diagnostics groups are returned.
4210 pub fn diagnostic_groups(
4211 &self,
4212 language_server_id: Option<LanguageServerId>,
4213 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4214 let mut groups = Vec::new();
4215
4216 if let Some(language_server_id) = language_server_id {
4217 if let Ok(ix) = self
4218 .diagnostics
4219 .binary_search_by_key(&language_server_id, |e| e.0)
4220 {
4221 self.diagnostics[ix]
4222 .1
4223 .groups(language_server_id, &mut groups, self);
4224 }
4225 } else {
4226 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4227 diagnostics.groups(*language_server_id, &mut groups, self);
4228 }
4229 }
4230
4231 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4232 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4233 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4234 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4235 });
4236
4237 groups
4238 }
4239
4240 /// Returns an iterator over the diagnostics for the given group.
4241 pub fn diagnostic_group<O>(
4242 &self,
4243 group_id: usize,
4244 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4245 where
4246 O: FromAnchor + 'static,
4247 {
4248 self.diagnostics
4249 .iter()
4250 .flat_map(move |(_, set)| set.group(group_id, self))
4251 }
4252
4253 /// An integer version number that accounts for all updates besides
4254 /// the buffer's text itself (which is versioned via a version vector).
4255 pub fn non_text_state_update_count(&self) -> usize {
4256 self.non_text_state_update_count
4257 }
4258
    /// Returns a snapshot of the underlying file.
4260 pub fn file(&self) -> Option<&Arc<dyn File>> {
4261 self.file.as_ref()
4262 }
4263
4264 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4265 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4266 if let Some(file) = self.file() {
4267 if file.path().file_name().is_none() || include_root {
4268 Some(file.full_path(cx))
4269 } else {
4270 Some(file.path().to_path_buf())
4271 }
4272 } else {
4273 None
4274 }
4275 }
4276
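    /// Collects the distinct words in the given range, optionally filtering them with a
    /// fuzzy query.
    ///
    /// For example (a minimal sketch, assuming `snapshot` is a `BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// // `words` maps each matching word to its anchor range within the buffer.
    /// ```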
4277 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4278 let query_str = query.fuzzy_contents;
4279 if query_str.map_or(false, |query| query.is_empty()) {
4280 return BTreeMap::default();
4281 }
4282
4283 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4284 language,
4285 override_id: None,
4286 }));
4287
4288 let mut query_ix = 0;
4289 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4290 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4291
4292 let mut words = BTreeMap::default();
4293 let mut current_word_start_ix = None;
4294 let mut chunk_ix = query.range.start;
4295 for chunk in self.chunks(query.range, false) {
4296 for (i, c) in chunk.text.char_indices() {
4297 let ix = chunk_ix + i;
4298 if classifier.is_word(c) {
4299 if current_word_start_ix.is_none() {
4300 current_word_start_ix = Some(ix);
4301 }
4302
4303 if let Some(query_chars) = &query_chars {
4304 if query_ix < query_len {
4305 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4306 query_ix += 1;
4307 }
4308 }
4309 }
4310 continue;
4311 } else if let Some(word_start) = current_word_start_ix.take() {
4312 if query_ix == query_len {
4313 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4314 let mut word_text = self.text_for_range(word_start..ix).peekable();
4315 let first_char = word_text
4316 .peek()
4317 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4319 if !query.skip_digits
4320 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4321 {
4322 words.insert(word_text.collect(), word_range);
4323 }
4324 }
4325 }
4326 query_ix = 0;
4327 }
4328 chunk_ix += chunk.text.len();
4329 }
4330
4331 words
4332 }
4333}
4334
4335pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy query, matched
    /// case-insensitively and in order.
4337 pub fuzzy_contents: Option<&'a str>,
4338 /// Skips words that start with a digit.
4339 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4341 pub range: Range<usize>,
4342}
4343
4344fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4345 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4346}
4347
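/// Computes the leading indentation of the given character stream, stopping at the first
/// non-indent character.
///
/// A minimal sketch of the expected result:
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4);
/// assert!(matches!(indent.kind, IndentKind::Space));
/// ```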
4348fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4349 let mut result = IndentSize::spaces(0);
4350 for c in text {
4351 let kind = match c {
4352 ' ' => IndentKind::Space,
4353 '\t' => IndentKind::Tab,
4354 _ => break,
4355 };
4356 if result.len == 0 {
4357 result.kind = kind;
4358 }
4359 result.len += 1;
4360 }
4361 result
4362}
4363
4364impl Clone for BufferSnapshot {
4365 fn clone(&self) -> Self {
4366 Self {
4367 text: self.text.clone(),
4368 syntax: self.syntax.clone(),
4369 file: self.file.clone(),
4370 remote_selections: self.remote_selections.clone(),
4371 diagnostics: self.diagnostics.clone(),
4372 language: self.language.clone(),
4373 non_text_state_update_count: self.non_text_state_update_count,
4374 }
4375 }
4376}
4377
4378impl Deref for BufferSnapshot {
4379 type Target = text::BufferSnapshot;
4380
4381 fn deref(&self) -> &Self::Target {
4382 &self.text
4383 }
4384}
4385
4386unsafe impl Send for BufferChunks<'_> {}
4387
4388impl<'a> BufferChunks<'a> {
4389 pub(crate) fn new(
4390 text: &'a Rope,
4391 range: Range<usize>,
4392 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4393 diagnostics: bool,
4394 buffer_snapshot: Option<&'a BufferSnapshot>,
4395 ) -> Self {
4396 let mut highlights = None;
4397 if let Some((captures, highlight_maps)) = syntax {
4398 highlights = Some(BufferChunkHighlights {
4399 captures,
4400 next_capture: None,
4401 stack: Default::default(),
4402 highlight_maps,
4403 })
4404 }
4405
4406 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4407 let chunks = text.chunks_in_range(range.clone());
4408
4409 let mut this = BufferChunks {
4410 range,
4411 buffer_snapshot,
4412 chunks,
4413 diagnostic_endpoints,
4414 error_depth: 0,
4415 warning_depth: 0,
4416 information_depth: 0,
4417 hint_depth: 0,
4418 unnecessary_depth: 0,
4419 underline: true,
4420 highlights,
4421 };
4422 this.initialize_diagnostic_endpoints();
4423 this
4424 }
4425
    /// Seeks to the given byte range in the buffer.
4427 pub fn seek(&mut self, range: Range<usize>) {
4428 let old_range = std::mem::replace(&mut self.range, range.clone());
4429 self.chunks.set_range(self.range.clone());
4430 if let Some(highlights) = self.highlights.as_mut() {
4431 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4432 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4433 highlights
4434 .stack
4435 .retain(|(end_offset, _)| *end_offset > range.start);
4436 if let Some(capture) = &highlights.next_capture {
4437 if range.start >= capture.node.start_byte() {
4438 let next_capture_end = capture.node.end_byte();
4439 if range.start < next_capture_end {
4440 highlights.stack.push((
4441 next_capture_end,
4442 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4443 ));
4444 }
4445 highlights.next_capture.take();
4446 }
4447 }
4448 } else if let Some(snapshot) = self.buffer_snapshot {
4449 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4450 *highlights = BufferChunkHighlights {
4451 captures,
4452 next_capture: None,
4453 stack: Default::default(),
4454 highlight_maps,
4455 };
4456 } else {
4457 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4458 // Seeking such BufferChunks is not supported.
4459 debug_assert!(
4460 false,
4461 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4462 );
4463 }
4464
4465 highlights.captures.set_byte_range(self.range.clone());
4466 self.initialize_diagnostic_endpoints();
4467 }
4468 }
4469
4470 fn initialize_diagnostic_endpoints(&mut self) {
4471 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4472 if let Some(buffer) = self.buffer_snapshot {
4473 let mut diagnostic_endpoints = Vec::new();
4474 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4475 diagnostic_endpoints.push(DiagnosticEndpoint {
4476 offset: entry.range.start,
4477 is_start: true,
4478 severity: entry.diagnostic.severity,
4479 is_unnecessary: entry.diagnostic.is_unnecessary,
4480 underline: entry.diagnostic.underline,
4481 });
4482 diagnostic_endpoints.push(DiagnosticEndpoint {
4483 offset: entry.range.end,
4484 is_start: false,
4485 severity: entry.diagnostic.severity,
4486 is_unnecessary: entry.diagnostic.is_unnecessary,
4487 underline: entry.diagnostic.underline,
4488 });
4489 }
4490 diagnostic_endpoints
4491 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4492 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4493 self.hint_depth = 0;
4494 self.error_depth = 0;
4495 self.warning_depth = 0;
4496 self.information_depth = 0;
4497 }
4498 }
4499 }
4500
4501 /// The current byte offset in the buffer.
4502 pub fn offset(&self) -> usize {
4503 self.range.start
4504 }
4505
4506 pub fn range(&self) -> Range<usize> {
4507 self.range.clone()
4508 }
4509
4510 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4511 let depth = match endpoint.severity {
4512 DiagnosticSeverity::ERROR => &mut self.error_depth,
4513 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4514 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4515 DiagnosticSeverity::HINT => &mut self.hint_depth,
4516 _ => return,
4517 };
4518 if endpoint.is_start {
4519 *depth += 1;
4520 } else {
4521 *depth -= 1;
4522 }
4523
4524 if endpoint.is_unnecessary {
4525 if endpoint.is_start {
4526 self.unnecessary_depth += 1;
4527 } else {
4528 self.unnecessary_depth -= 1;
4529 }
4530 }
4531 }
4532
4533 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4534 if self.error_depth > 0 {
4535 Some(DiagnosticSeverity::ERROR)
4536 } else if self.warning_depth > 0 {
4537 Some(DiagnosticSeverity::WARNING)
4538 } else if self.information_depth > 0 {
4539 Some(DiagnosticSeverity::INFORMATION)
4540 } else if self.hint_depth > 0 {
4541 Some(DiagnosticSeverity::HINT)
4542 } else {
4543 None
4544 }
4545 }
4546
4547 fn current_code_is_unnecessary(&self) -> bool {
4548 self.unnecessary_depth > 0
4549 }
4550}
4551
4552impl<'a> Iterator for BufferChunks<'a> {
4553 type Item = Chunk<'a>;
4554
4555 fn next(&mut self) -> Option<Self::Item> {
4556 let mut next_capture_start = usize::MAX;
4557 let mut next_diagnostic_endpoint = usize::MAX;
4558
4559 if let Some(highlights) = self.highlights.as_mut() {
4560 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4561 if *parent_capture_end <= self.range.start {
4562 highlights.stack.pop();
4563 } else {
4564 break;
4565 }
4566 }
4567
4568 if highlights.next_capture.is_none() {
4569 highlights.next_capture = highlights.captures.next();
4570 }
4571
4572 while let Some(capture) = highlights.next_capture.as_ref() {
4573 if self.range.start < capture.node.start_byte() {
4574 next_capture_start = capture.node.start_byte();
4575 break;
4576 } else {
4577 let highlight_id =
4578 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4579 highlights
4580 .stack
4581 .push((capture.node.end_byte(), highlight_id));
4582 highlights.next_capture = highlights.captures.next();
4583 }
4584 }
4585 }
4586
4587 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4588 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4589 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4590 if endpoint.offset <= self.range.start {
4591 self.update_diagnostic_depths(endpoint);
4592 diagnostic_endpoints.next();
4593 self.underline = endpoint.underline;
4594 } else {
4595 next_diagnostic_endpoint = endpoint.offset;
4596 break;
4597 }
4598 }
4599 }
4600 self.diagnostic_endpoints = diagnostic_endpoints;
4601
4602 if let Some(chunk) = self.chunks.peek() {
4603 let chunk_start = self.range.start;
4604 let mut chunk_end = (self.chunks.offset() + chunk.len())
4605 .min(next_capture_start)
4606 .min(next_diagnostic_endpoint);
4607 let mut highlight_id = None;
4608 if let Some(highlights) = self.highlights.as_ref() {
4609 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4610 chunk_end = chunk_end.min(*parent_capture_end);
4611 highlight_id = Some(*parent_highlight_id);
4612 }
4613 }
4614
4615 let slice =
4616 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4617 self.range.start = chunk_end;
4618 if self.range.start == self.chunks.offset() + chunk.len() {
4619 self.chunks.next().unwrap();
4620 }
4621
4622 Some(Chunk {
4623 text: slice,
4624 syntax_highlight_id: highlight_id,
4625 underline: self.underline,
4626 diagnostic_severity: self.current_diagnostic_severity(),
4627 is_unnecessary: self.current_code_is_unnecessary(),
4628 ..Chunk::default()
4629 })
4630 } else {
4631 None
4632 }
4633 }
4634}
4635
4636impl operation_queue::Operation for Operation {
4637 fn lamport_timestamp(&self) -> clock::Lamport {
4638 match self {
4639 Operation::Buffer(_) => {
4640 unreachable!("buffer operations should never be deferred at this layer")
4641 }
4642 Operation::UpdateDiagnostics {
4643 lamport_timestamp, ..
4644 }
4645 | Operation::UpdateSelections {
4646 lamport_timestamp, ..
4647 }
4648 | Operation::UpdateCompletionTriggers {
4649 lamport_timestamp, ..
4650 } => *lamport_timestamp,
4651 }
4652 }
4653}
4654
4655impl Default for Diagnostic {
4656 fn default() -> Self {
4657 Self {
4658 source: Default::default(),
4659 source_kind: DiagnosticSourceKind::Other,
4660 code: None,
4661 code_description: None,
4662 severity: DiagnosticSeverity::ERROR,
4663 message: Default::default(),
4664 markdown: None,
4665 group_id: 0,
4666 is_primary: false,
4667 is_disk_based: false,
4668 is_unnecessary: false,
4669 underline: true,
4670 data: None,
4671 }
4672 }
4673}
4674
4675impl IndentSize {
4676 /// Returns an [`IndentSize`] representing the given spaces.
4677 pub fn spaces(len: u32) -> Self {
4678 Self {
4679 len,
4680 kind: IndentKind::Space,
4681 }
4682 }
4683
4684 /// Returns an [`IndentSize`] representing a tab.
4685 pub fn tab() -> Self {
4686 Self {
4687 len: 1,
4688 kind: IndentKind::Tab,
4689 }
4690 }
4691
4692 /// An iterator over the characters represented by this [`IndentSize`].
4693 pub fn chars(&self) -> impl Iterator<Item = char> {
4694 iter::repeat(self.char()).take(self.len as usize)
4695 }
4696
4697 /// The character representation of this [`IndentSize`].
4698 pub fn char(&self) -> char {
4699 match self.kind {
4700 IndentKind::Space => ' ',
4701 IndentKind::Tab => '\t',
4702 }
4703 }
4704
4705 /// Consumes the current [`IndentSize`] and returns a new one that has
4706 /// been shrunk or enlarged by the given size along the given direction.
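    ///
    /// A minimal sketch of growing an indent and expanding it to columns:
    ///
    /// ```ignore
    /// let indent = IndentSize::tab().with_delta(Ordering::Greater, IndentSize::tab());
    /// // Two tabs expand to eight columns with a tab size of four.
    /// assert_eq!(indent.len_with_expanded_tabs(NonZeroU32::new(4).unwrap()), 8);
    /// ```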
4707 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4708 match direction {
4709 Ordering::Less => {
4710 if self.kind == size.kind && self.len >= size.len {
4711 self.len -= size.len;
4712 }
4713 }
4714 Ordering::Equal => {}
4715 Ordering::Greater => {
4716 if self.len == 0 {
4717 self = size;
4718 } else if self.kind == size.kind {
4719 self.len += size.len;
4720 }
4721 }
4722 }
4723 self
4724 }
4725
4726 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4727 match self.kind {
4728 IndentKind::Space => self.len as usize,
4729 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4730 }
4731 }
4732}
4733
4734#[cfg(any(test, feature = "test-support"))]
4735pub struct TestFile {
4736 pub path: Arc<Path>,
4737 pub root_name: String,
4738 pub local_root: Option<PathBuf>,
4739}
4740
4741#[cfg(any(test, feature = "test-support"))]
4742impl File for TestFile {
4743 fn path(&self) -> &Arc<Path> {
4744 &self.path
4745 }
4746
4747 fn full_path(&self, _: &gpui::App) -> PathBuf {
4748 PathBuf::from(&self.root_name).join(self.path.as_ref())
4749 }
4750
4751 fn as_local(&self) -> Option<&dyn LocalFile> {
4752 if self.local_root.is_some() {
4753 Some(self)
4754 } else {
4755 None
4756 }
4757 }
4758
4759 fn disk_state(&self) -> DiskState {
4760 unimplemented!()
4761 }
4762
4763 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4764 self.path().file_name().unwrap_or(self.root_name.as_ref())
4765 }
4766
4767 fn worktree_id(&self, _: &App) -> WorktreeId {
4768 WorktreeId::from_usize(0)
4769 }
4770
4771 fn to_proto(&self, _: &App) -> rpc::proto::File {
4772 unimplemented!()
4773 }
4774
4775 fn is_private(&self) -> bool {
4776 false
4777 }
4778}
4779
4780#[cfg(any(test, feature = "test-support"))]
4781impl LocalFile for TestFile {
4782 fn abs_path(&self, _cx: &App) -> PathBuf {
4783 PathBuf::from(self.local_root.as_ref().unwrap())
4784 .join(&self.root_name)
4785 .join(self.path.as_ref())
4786 }
4787
4788 fn load(&self, _cx: &App) -> Task<Result<String>> {
4789 unimplemented!()
4790 }
4791
4792 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4793 unimplemented!()
4794 }
4795}
4796
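/// Groups an iterator of row numbers into ranges of consecutive values, each containing at
/// most `max_len` values.
///
/// A minimal sketch of the expected grouping:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 10].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 10..11]);
/// ```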
4797pub(crate) fn contiguous_ranges(
4798 values: impl Iterator<Item = u32>,
4799 max_len: usize,
4800) -> impl Iterator<Item = Range<u32>> {
4801 let mut values = values;
4802 let mut current_range: Option<Range<u32>> = None;
4803 std::iter::from_fn(move || {
4804 loop {
4805 if let Some(value) = values.next() {
4806 if let Some(range) = &mut current_range {
4807 if value == range.end && range.len() < max_len {
4808 range.end += 1;
4809 continue;
4810 }
4811 }
4812
4813 let prev_range = current_range.clone();
4814 current_range = Some(value..(value + 1));
4815 if prev_range.is_some() {
4816 return prev_range;
4817 }
4818 } else {
4819 return current_range.take();
4820 }
4821 }
4822 })
4823}
4824
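/// Classifies characters as word, whitespace, or punctuation characters, optionally taking
/// language-specific word characters from a [`LanguageScope`] into account.
///
/// A minimal sketch with no language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
/// ```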
4825#[derive(Default, Debug)]
4826pub struct CharClassifier {
4827 scope: Option<LanguageScope>,
4828 for_completion: bool,
4829 ignore_punctuation: bool,
4830}
4831
4832impl CharClassifier {
4833 pub fn new(scope: Option<LanguageScope>) -> Self {
4834 Self {
4835 scope,
4836 for_completion: false,
4837 ignore_punctuation: false,
4838 }
4839 }
4840
4841 pub fn for_completion(self, for_completion: bool) -> Self {
4842 Self {
4843 for_completion,
4844 ..self
4845 }
4846 }
4847
4848 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4849 Self {
4850 ignore_punctuation,
4851 ..self
4852 }
4853 }
4854
4855 pub fn is_whitespace(&self, c: char) -> bool {
4856 self.kind(c) == CharKind::Whitespace
4857 }
4858
4859 pub fn is_word(&self, c: char) -> bool {
4860 self.kind(c) == CharKind::Word
4861 }
4862
4863 pub fn is_punctuation(&self, c: char) -> bool {
4864 self.kind(c) == CharKind::Punctuation
4865 }
4866
4867 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4868 if c.is_alphanumeric() || c == '_' {
4869 return CharKind::Word;
4870 }
4871
4872 if let Some(scope) = &self.scope {
4873 let characters = if self.for_completion {
4874 scope.completion_query_characters()
4875 } else {
4876 scope.word_characters()
4877 };
4878 if let Some(characters) = characters {
4879 if characters.contains(&c) {
4880 return CharKind::Word;
4881 }
4882 }
4883 }
4884
4885 if c.is_whitespace() {
4886 return CharKind::Whitespace;
4887 }
4888
4889 if ignore_punctuation {
4890 CharKind::Word
4891 } else {
4892 CharKind::Punctuation
4893 }
4894 }
4895
4896 pub fn kind(&self, c: char) -> CharKind {
4897 self.kind_with(c, self.ignore_punctuation)
4898 }
4899}
4900
4901/// Find all of the ranges of whitespace that occur at the ends of lines
4902/// in the given rope.
4903///
4904/// This could also be done with a regex search, but this implementation
4905/// avoids copying text.
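///
/// For example (a minimal sketch, assuming `Rope` can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n    let x = 1;\t\n}\n");
/// // Two ranges: the trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope).len(), 2);
/// ```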
4906pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4907 let mut ranges = Vec::new();
4908
4909 let mut offset = 0;
4910 let mut prev_chunk_trailing_whitespace_range = 0..0;
4911 for chunk in rope.chunks() {
4912 let mut prev_line_trailing_whitespace_range = 0..0;
4913 for (i, line) in chunk.split('\n').enumerate() {
4914 let line_end_offset = offset + line.len();
4915 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4916 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4917
4918 if i == 0 && trimmed_line_len == 0 {
4919 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4920 }
4921 if !prev_line_trailing_whitespace_range.is_empty() {
4922 ranges.push(prev_line_trailing_whitespace_range);
4923 }
4924
4925 offset = line_end_offset + 1;
4926 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4927 }
4928
4929 offset -= 1;
4930 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4931 }
4932
4933 if !prev_chunk_trailing_whitespace_range.is_empty() {
4934 ranges.push(prev_chunk_trailing_whitespace_range);
4935 }
4936
4937 ranges
4938}