1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to edit.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
    /// The result ID received the last time diagnostics were pulled for this buffer.
130 pull_diagnostics_result_id: Option<String>,
131}
132
133#[derive(Copy, Clone, Debug, PartialEq, Eq)]
134pub enum ParseStatus {
135 Idle,
136 Parsing,
137}
138
139struct BufferBranchState {
140 base_buffer: Entity<Buffer>,
141 merged_operations: Vec<Lamport>,
142}
143
144/// An immutable, cheaply cloneable representation of a fixed
145/// state of a buffer.
146pub struct BufferSnapshot {
147 pub text: text::BufferSnapshot,
148 pub(crate) syntax: SyntaxSnapshot,
149 file: Option<Arc<dyn File>>,
150 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
151 remote_selections: TreeMap<ReplicaId, SelectionSet>,
152 language: Option<Arc<Language>>,
153 non_text_state_update_count: usize,
154}
155
156/// The kind and amount of indentation in a particular line. For now,
157/// assumes that indentation is all the same character.
158#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
159pub struct IndentSize {
160 /// The number of bytes that comprise the indentation.
161 pub len: u32,
162 /// The kind of whitespace used for indentation.
163 pub kind: IndentKind,
164}
165
166/// A whitespace character that's used for indentation.
167#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
168pub enum IndentKind {
169 /// An ASCII space character.
170 #[default]
171 Space,
172 /// An ASCII tab character.
173 Tab,
174}
175
176/// The shape of a selection cursor.
177#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
178#[serde(rename_all = "snake_case")]
179pub enum CursorShape {
180 /// A vertical bar
181 #[default]
182 Bar,
183 /// A block that surrounds the following character
184 Block,
185 /// An underline that runs along the following character
186 Underline,
187 /// A box drawn around the following character
188 Hollow,
189}
190
191#[derive(Clone, Debug)]
192struct SelectionSet {
193 line_mode: bool,
194 cursor_shape: CursorShape,
195 selections: Arc<[Selection<Anchor>]>,
196 lamport_timestamp: clock::Lamport,
197}
198
199/// A diagnostic associated with a certain range of a buffer.
200#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
201pub struct Diagnostic {
202 /// The name of the service that produced this diagnostic.
203 pub source: Option<String>,
204 /// A machine-readable code that identifies this diagnostic.
205 pub code: Option<NumberOrString>,
206 pub code_description: Option<lsp::Url>,
207 /// Whether this diagnostic is a hint, warning, or error.
208 pub severity: DiagnosticSeverity,
209 /// The human-readable message associated with this diagnostic.
210 pub message: String,
    /// An optional Markdown-formatted version of the message.
212 pub markdown: Option<String>,
213 /// An id that identifies the group to which this diagnostic belongs.
214 ///
215 /// When a language server produces a diagnostic with
216 /// one or more associated diagnostics, those diagnostics are all
217 /// assigned a single group ID.
218 pub group_id: usize,
219 /// Whether this diagnostic is the primary diagnostic for its group.
220 ///
221 /// In a given group, the primary diagnostic is the top-level diagnostic
222 /// returned by the language server. The non-primary diagnostics are the
223 /// associated diagnostics.
224 pub is_primary: bool,
225 /// Whether this diagnostic is considered to originate from an analysis of
226 /// files on disk, as opposed to any unsaved buffer contents. This is a
227 /// property of a given diagnostic source, and is configured for a given
228 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
229 /// for the language server.
230 pub is_disk_based: bool,
231 /// Whether this diagnostic marks unnecessary code.
232 pub is_unnecessary: bool,
    /// The kind of source that produced this diagnostic, used to quickly
    /// separate diagnostic groups by their origin.
234 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the server when
    /// code actions are requested for this diagnostic.
236 pub data: Option<Value>,
237 /// Whether to underline the corresponding text range in the editor.
238 pub underline: bool,
239}
240
241#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
242pub enum DiagnosticSourceKind {
243 Pulled,
244 Pushed,
245 Other,
246}
247
248/// An operation used to synchronize this buffer with its other replicas.
249#[derive(Clone, Debug, PartialEq)]
250pub enum Operation {
251 /// A text operation.
252 Buffer(text::Operation),
253
254 /// An update to the buffer's diagnostics.
255 UpdateDiagnostics {
256 /// The id of the language server that produced the new diagnostics.
257 server_id: LanguageServerId,
258 /// The diagnostics.
259 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
260 /// The buffer's lamport timestamp.
261 lamport_timestamp: clock::Lamport,
262 },
263
264 /// An update to the most recent selections in this buffer.
265 UpdateSelections {
266 /// The selections.
267 selections: Arc<[Selection<Anchor>]>,
268 /// The buffer's lamport timestamp.
269 lamport_timestamp: clock::Lamport,
270 /// Whether the selections are in 'line mode'.
271 line_mode: bool,
272 /// The [`CursorShape`] associated with these selections.
273 cursor_shape: CursorShape,
274 },
275
276 /// An update to the characters that should trigger autocompletion
277 /// for this buffer.
278 UpdateCompletionTriggers {
279 /// The characters that trigger autocompletion.
280 triggers: Vec<String>,
281 /// The buffer's lamport timestamp.
282 lamport_timestamp: clock::Lamport,
283 /// The language server ID.
284 server_id: LanguageServerId,
285 },
286}
287
288/// An event that occurs in a buffer.
289#[derive(Clone, Debug, PartialEq)]
290pub enum BufferEvent {
291 /// The buffer was changed in a way that must be
292 /// propagated to its other replicas.
293 Operation {
294 operation: Operation,
295 is_local: bool,
296 },
297 /// The buffer was edited.
298 Edited,
299 /// The buffer's `dirty` bit changed.
300 DirtyChanged,
301 /// The buffer was saved.
302 Saved,
303 /// The buffer's file was changed on disk.
304 FileHandleChanged,
305 /// The buffer was reloaded.
306 Reloaded,
    /// The buffer needs to be reloaded.
308 ReloadNeeded,
309 /// The buffer's language was changed.
310 LanguageChanged,
311 /// The buffer's syntax trees were updated.
312 Reparsed,
313 /// The buffer's diagnostics were updated.
314 DiagnosticsUpdated,
315 /// The buffer gained or lost editing capabilities.
316 CapabilityChanged,
317 /// The buffer was explicitly requested to close.
318 Closed,
319 /// The buffer was discarded when closing.
320 Discarded,
321}
322
323/// The file associated with a buffer.
324pub trait File: Send + Sync + Any {
325 /// Returns the [`LocalFile`] associated with this file, if the
326 /// file is local.
327 fn as_local(&self) -> Option<&dyn LocalFile>;
328
329 /// Returns whether this file is local.
330 fn is_local(&self) -> bool {
331 self.as_local().is_some()
332 }
333
334 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
335 /// only available in some states, such as modification time.
336 fn disk_state(&self) -> DiskState;
337
338 /// Returns the path of this file relative to the worktree's root directory.
339 fn path(&self) -> &Arc<Path>;
340
341 /// Returns the path of this file relative to the worktree's parent directory (this means it
342 /// includes the name of the worktree's root folder).
343 fn full_path(&self, cx: &App) -> PathBuf;
344
345 /// Returns the last component of this handle's absolute path. If this handle refers to the root
346 /// of its worktree, then this method will return the name of the worktree itself.
347 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
348
349 /// Returns the id of the worktree to which this file belongs.
350 ///
351 /// This is needed for looking up project-specific settings.
352 fn worktree_id(&self, cx: &App) -> WorktreeId;
353
354 /// Converts this file into a protobuf message.
355 fn to_proto(&self, cx: &App) -> rpc::proto::File;
356
    /// Returns whether Zed considers this to be a private file.
358 fn is_private(&self) -> bool;
359}
360
/// The file's storage status: whether it's stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for new
/// files.
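///
/// A minimal sketch of how a caller might branch on this state (illustrative,
/// not a doctest; assumes some `file: &dyn File` is in scope):
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved"),
///     DiskState::Present { mtime } => println!("on disk, modified at {mtime:?}"),
///     DiskState::Deleted => println!("deleted from disk"),
/// }
/// ```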
365#[derive(Copy, Clone, Debug, PartialEq)]
366pub enum DiskState {
367 /// File created in Zed that has not been saved.
368 New,
369 /// File present on the filesystem.
370 Present { mtime: MTime },
371 /// Deleted file that was previously present.
372 Deleted,
373}
374
375impl DiskState {
376 /// Returns the file's last known modification time on disk.
377 pub fn mtime(self) -> Option<MTime> {
378 match self {
379 DiskState::New => None,
380 DiskState::Present { mtime } => Some(mtime),
381 DiskState::Deleted => None,
382 }
383 }
384
385 pub fn exists(&self) -> bool {
386 match self {
387 DiskState::New => false,
388 DiskState::Present { .. } => true,
389 DiskState::Deleted => false,
390 }
391 }
392}
393
394/// The file associated with a buffer, in the case where the file is on the local disk.
395pub trait LocalFile: File {
    /// Returns the absolute path of this file.
397 fn abs_path(&self, cx: &App) -> PathBuf;
398
399 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
400 fn load(&self, cx: &App) -> Task<Result<String>>;
401
402 /// Loads the file's contents from disk.
403 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
404}
405
406/// The auto-indent behavior associated with an editing operation.
407/// For some editing operations, each affected line of text has its
408/// indentation recomputed. For other operations, the entire block
409/// of edited text is adjusted uniformly.
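///
/// A hedged sketch of how the two modes might be passed to [`Buffer::edit`]
/// (illustrative only; `buffer` and `cx` are assumed to come from an entity
/// update):
///
/// ```ignore
/// // Reindent every inserted line according to the language's indentation rules.
/// buffer.edit([(0..0, "fn f() {\n1\n}\n")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal relative indentation.
/// buffer.edit(
///     [(0..0, "    if x {\n        y();\n    }\n")],
///     Some(AutoindentMode::Block { original_indent_columns: vec![Some(4)] }),
///     cx,
/// );
/// ```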
410#[derive(Clone, Debug)]
411pub enum AutoindentMode {
412 /// Indent each line of inserted text.
413 EachLine,
414 /// Apply the same indentation adjustment to all of the lines
415 /// in a given insertion.
416 Block {
417 /// The original indentation column of the first line of each
418 /// insertion, if it has been copied.
419 ///
420 /// Knowing this makes it possible to preserve the relative indentation
421 /// of every line in the insertion from when it was copied.
422 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by that same delta of `b - a` columns,
        /// preserving its indentation relative to the first line.
426 original_indent_columns: Vec<Option<u32>>,
427 },
428}
429
430#[derive(Clone)]
431struct AutoindentRequest {
432 before_edit: BufferSnapshot,
433 entries: Vec<AutoindentRequestEntry>,
434 is_block_mode: bool,
435 ignore_empty_lines: bool,
436}
437
438#[derive(Debug, Clone)]
439struct AutoindentRequestEntry {
440 /// A range of the buffer whose indentation should be adjusted.
441 range: Range<Anchor>,
442 /// Whether or not these lines should be considered brand new, for the
443 /// purpose of auto-indent. When text is not new, its indentation will
444 /// only be adjusted if the suggested indentation level has *changed*
445 /// since the edit was made.
446 first_line_is_new: bool,
447 indent_size: IndentSize,
448 original_indent_column: Option<u32>,
449}
450
451#[derive(Debug)]
452struct IndentSuggestion {
453 basis_row: u32,
454 delta: Ordering,
455 within_error: bool,
456}
457
458struct BufferChunkHighlights<'a> {
459 captures: SyntaxMapCaptures<'a>,
460 next_capture: Option<SyntaxMapCapture<'a>>,
461 stack: Vec<(usize, HighlightId)>,
462 highlight_maps: Vec<HighlightMap>,
463}
464
465/// An iterator that yields chunks of a buffer's text, along with their
466/// syntax highlights and diagnostic status.
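///
/// Illustrative sketch of consuming these chunks from a snapshot (not a
/// doctest; assumes a `snapshot: BufferSnapshot` and that chunks are obtained
/// via a `chunks(range, language_aware)`-style accessor on the snapshot):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     let highlighted = chunk.syntax_highlight_id.is_some();
///     println!("{:?} (highlighted: {highlighted})", chunk.text);
/// }
/// ```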
467pub struct BufferChunks<'a> {
468 buffer_snapshot: Option<&'a BufferSnapshot>,
469 range: Range<usize>,
470 chunks: text::Chunks<'a>,
471 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
472 error_depth: usize,
473 warning_depth: usize,
474 information_depth: usize,
475 hint_depth: usize,
476 unnecessary_depth: usize,
477 underline: bool,
478 highlights: Option<BufferChunkHighlights<'a>>,
479}
480
481/// A chunk of a buffer's text, along with its syntax highlight and
482/// diagnostic status.
483#[derive(Clone, Debug, Default)]
484pub struct Chunk<'a> {
485 /// The text of the chunk.
486 pub text: &'a str,
487 /// The syntax highlighting style of the chunk.
488 pub syntax_highlight_id: Option<HighlightId>,
489 /// The highlight style that has been applied to this chunk in
490 /// the editor.
491 pub highlight_style: Option<HighlightStyle>,
492 /// The severity of diagnostic associated with this chunk, if any.
493 pub diagnostic_severity: Option<DiagnosticSeverity>,
494 /// Whether this chunk of text is marked as unnecessary.
495 pub is_unnecessary: bool,
496 /// Whether this chunk of text was originally a tab character.
497 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay, rather than from the
    /// underlying buffer text.
499 pub is_inlay: bool,
500 /// Whether to underline the corresponding text range in the editor.
501 pub underline: bool,
502}
503
504/// A set of edits to a given version of a buffer, computed asynchronously.
505#[derive(Debug)]
506pub struct Diff {
507 pub base_version: clock::Global,
508 pub line_ending: LineEnding,
509 pub edits: Vec<(Range<usize>, Arc<str>)>,
510}
511
512#[derive(Debug, Clone, Copy)]
513pub(crate) struct DiagnosticEndpoint {
514 offset: usize,
515 is_start: bool,
516 underline: bool,
517 severity: DiagnosticSeverity,
518 is_unnecessary: bool,
519}
520
521/// A class of characters, used for characterizing a run of text.
522#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
523pub enum CharKind {
524 /// Whitespace.
525 Whitespace,
526 /// Punctuation.
527 Punctuation,
528 /// Word.
529 Word,
530}
531
/// A runnable is a set of data about a region of a buffer that can be resolved into a task.
533pub struct Runnable {
534 pub tags: SmallVec<[RunnableTag; 1]>,
535 pub language: Arc<Language>,
536 pub buffer: BufferId,
537}
538
539#[derive(Default, Clone, Debug)]
540pub struct HighlightedText {
541 pub text: SharedString,
542 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
543}
544
545#[derive(Default, Debug)]
546struct HighlightedTextBuilder {
547 pub text: String,
548 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
549}
550
551impl HighlightedText {
552 pub fn from_buffer_range<T: ToOffset>(
553 range: Range<T>,
554 snapshot: &text::BufferSnapshot,
555 syntax_snapshot: &SyntaxSnapshot,
556 override_style: Option<HighlightStyle>,
557 syntax_theme: &SyntaxTheme,
558 ) -> Self {
559 let mut highlighted_text = HighlightedTextBuilder::default();
560 highlighted_text.add_text_from_buffer_range(
561 range,
562 snapshot,
563 syntax_snapshot,
564 override_style,
565 syntax_theme,
566 );
567 highlighted_text.build()
568 }
569
570 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
571 gpui::StyledText::new(self.text.clone())
572 .with_default_highlights(default_style, self.highlights.iter().cloned())
573 }
574
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within it, along with a boolean indicating whether more lines follow.
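    ///
    /// A hedged example of the intended behavior (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let highlighted = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: Vec::new(),
    /// };
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// assert_eq!(&*preview.text, "let x = 1;"); // leading whitespace trimmed
    /// assert!(has_more);
    /// ```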
577 pub fn first_line_preview(self) -> (Self, bool) {
578 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
579 let first_line = &self.text[..newline_ix];
580
581 // Trim leading whitespace, unless an edit starts prior to it.
582 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
583 if let Some((first_highlight_range, _)) = self.highlights.first() {
584 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
585 }
586
587 let preview_text = &first_line[preview_start_ix..];
588 let preview_highlights = self
589 .highlights
590 .into_iter()
591 .take_while(|(range, _)| range.start < newline_ix)
592 .filter_map(|(mut range, highlight)| {
593 range.start = range.start.saturating_sub(preview_start_ix);
594 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
595 if range.is_empty() {
596 None
597 } else {
598 Some((range, highlight))
599 }
600 });
601
602 let preview = Self {
603 text: SharedString::new(preview_text),
604 highlights: preview_highlights.collect(),
605 };
606
607 (preview, self.text.len() > newline_ix)
608 }
609}
610
611impl HighlightedTextBuilder {
612 pub fn build(self) -> HighlightedText {
613 HighlightedText {
614 text: self.text.into(),
615 highlights: self.highlights,
616 }
617 }
618
619 pub fn add_text_from_buffer_range<T: ToOffset>(
620 &mut self,
621 range: Range<T>,
622 snapshot: &text::BufferSnapshot,
623 syntax_snapshot: &SyntaxSnapshot,
624 override_style: Option<HighlightStyle>,
625 syntax_theme: &SyntaxTheme,
626 ) {
627 let range = range.to_offset(snapshot);
628 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
629 let start = self.text.len();
630 self.text.push_str(chunk.text);
631 let end = self.text.len();
632
633 if let Some(mut highlight_style) = chunk
634 .syntax_highlight_id
635 .and_then(|id| id.style(syntax_theme))
636 {
637 if let Some(override_style) = override_style {
638 highlight_style.highlight(override_style);
639 }
640 self.highlights.push((start..end, highlight_style));
641 } else if let Some(override_style) = override_style {
642 self.highlights.push((start..end, override_style));
643 }
644 }
645 }
646
647 fn highlighted_chunks<'a>(
648 range: Range<usize>,
649 snapshot: &'a text::BufferSnapshot,
650 syntax_snapshot: &'a SyntaxSnapshot,
651 ) -> BufferChunks<'a> {
652 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
653 grammar.highlights_query.as_ref()
654 });
655
656 let highlight_maps = captures
657 .grammars()
658 .iter()
659 .map(|grammar| grammar.highlight_map())
660 .collect();
661
662 BufferChunks::new(
663 snapshot.as_rope(),
664 range,
665 Some((captures, highlight_maps)),
666 false,
667 None,
668 )
669 }
670}
671
672#[derive(Clone)]
673pub struct EditPreview {
674 old_snapshot: text::BufferSnapshot,
675 applied_edits_snapshot: text::BufferSnapshot,
676 syntax_snapshot: SyntaxSnapshot,
677}
678
679impl EditPreview {
680 pub fn highlight_edits(
681 &self,
682 current_snapshot: &BufferSnapshot,
683 edits: &[(Range<Anchor>, String)],
684 include_deletions: bool,
685 cx: &App,
686 ) -> HighlightedText {
687 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
688 return HighlightedText::default();
689 };
690
691 let mut highlighted_text = HighlightedTextBuilder::default();
692
693 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
694
695 let insertion_highlight_style = HighlightStyle {
696 background_color: Some(cx.theme().status().created_background),
697 ..Default::default()
698 };
699 let deletion_highlight_style = HighlightStyle {
700 background_color: Some(cx.theme().status().deleted_background),
701 ..Default::default()
702 };
703 let syntax_theme = cx.theme().syntax();
704
705 for (range, edit_text) in edits {
706 let edit_new_end_in_preview_snapshot = range
707 .end
708 .bias_right(&self.old_snapshot)
709 .to_offset(&self.applied_edits_snapshot);
710 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
711
712 let unchanged_range_in_preview_snapshot =
713 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
714 if !unchanged_range_in_preview_snapshot.is_empty() {
715 highlighted_text.add_text_from_buffer_range(
716 unchanged_range_in_preview_snapshot,
717 &self.applied_edits_snapshot,
718 &self.syntax_snapshot,
719 None,
720 &syntax_theme,
721 );
722 }
723
724 let range_in_current_snapshot = range.to_offset(current_snapshot);
725 if include_deletions && !range_in_current_snapshot.is_empty() {
726 highlighted_text.add_text_from_buffer_range(
727 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
730 Some(deletion_highlight_style),
731 &syntax_theme,
732 );
733 }
734
735 if !edit_text.is_empty() {
736 highlighted_text.add_text_from_buffer_range(
737 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 Some(insertion_highlight_style),
741 &syntax_theme,
742 );
743 }
744
745 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
746 }
747
748 highlighted_text.add_text_from_buffer_range(
749 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
750 &self.applied_edits_snapshot,
751 &self.syntax_snapshot,
752 None,
753 &syntax_theme,
754 );
755
756 highlighted_text.build()
757 }
758
759 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
760 let (first, _) = edits.first()?;
761 let (last, _) = edits.last()?;
762
763 let start = first
764 .start
765 .bias_left(&self.old_snapshot)
766 .to_point(&self.applied_edits_snapshot);
767 let end = last
768 .end
769 .bias_right(&self.old_snapshot)
770 .to_point(&self.applied_edits_snapshot);
771
772 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
773 let range = Point::new(start.row, 0)
774 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
775
776 Some(range.to_offset(&self.applied_edits_snapshot))
777 }
778}
779
780#[derive(Clone, Debug, PartialEq, Eq)]
781pub struct BracketMatch {
782 pub open_range: Range<usize>,
783 pub close_range: Range<usize>,
784 pub newline_only: bool,
785}
786
787impl Buffer {
788 /// Create a new buffer with the given base text.
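    ///
    /// A minimal usage sketch (illustrative, not a doctest; assumes a gpui
    /// `App` or test context `cx` that can create entities):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("hello world", cx));
    /// let text = buffer.read(cx).text(); // => "hello world"
    /// ```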
789 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
790 Self::build(
791 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
792 None,
793 Capability::ReadWrite,
794 )
795 }
796
797 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
798 pub fn local_normalized(
799 base_text_normalized: Rope,
800 line_ending: LineEnding,
801 cx: &Context<Self>,
802 ) -> Self {
803 Self::build(
804 TextBuffer::new_normalized(
805 0,
806 cx.entity_id().as_non_zero_u64().into(),
807 line_ending,
808 base_text_normalized,
809 ),
810 None,
811 Capability::ReadWrite,
812 )
813 }
814
815 /// Create a new buffer that is a replica of a remote buffer.
816 pub fn remote(
817 remote_id: BufferId,
818 replica_id: ReplicaId,
819 capability: Capability,
820 base_text: impl Into<String>,
821 ) -> Self {
822 Self::build(
823 TextBuffer::new(replica_id, remote_id, base_text.into()),
824 None,
825 capability,
826 )
827 }
828
829 /// Create a new buffer that is a replica of a remote buffer, populating its
830 /// state from the given protobuf message.
831 pub fn from_proto(
832 replica_id: ReplicaId,
833 capability: Capability,
834 message: proto::BufferState,
835 file: Option<Arc<dyn File>>,
836 ) -> Result<Self> {
837 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
838 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
839 let mut this = Self::build(buffer, file, capability);
840 this.text.set_line_ending(proto::deserialize_line_ending(
841 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
842 ));
843 this.saved_version = proto::deserialize_version(&message.saved_version);
844 this.saved_mtime = message.saved_mtime.map(|time| time.into());
845 Ok(this)
846 }
847
848 /// Serialize the buffer's state to a protobuf message.
849 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
850 proto::BufferState {
851 id: self.remote_id().into(),
852 file: self.file.as_ref().map(|f| f.to_proto(cx)),
853 base_text: self.base_text().to_string(),
854 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
855 saved_version: proto::serialize_version(&self.saved_version),
856 saved_mtime: self.saved_mtime.map(|time| time.into()),
857 }
858 }
859
860 /// Serialize as protobufs all of the changes to the buffer since the given version.
861 pub fn serialize_ops(
862 &self,
863 since: Option<clock::Global>,
864 cx: &App,
865 ) -> Task<Vec<proto::Operation>> {
866 let mut operations = Vec::new();
867 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
868
869 operations.extend(self.remote_selections.iter().map(|(_, set)| {
870 proto::serialize_operation(&Operation::UpdateSelections {
871 selections: set.selections.clone(),
872 lamport_timestamp: set.lamport_timestamp,
873 line_mode: set.line_mode,
874 cursor_shape: set.cursor_shape,
875 })
876 }));
877
878 for (server_id, diagnostics) in &self.diagnostics {
879 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
880 lamport_timestamp: self.diagnostics_timestamp,
881 server_id: *server_id,
882 diagnostics: diagnostics.iter().cloned().collect(),
883 }));
884 }
885
886 for (server_id, completions) in &self.completion_triggers_per_language_server {
887 operations.push(proto::serialize_operation(
888 &Operation::UpdateCompletionTriggers {
889 triggers: completions.iter().cloned().collect(),
890 lamport_timestamp: self.completion_triggers_timestamp,
891 server_id: *server_id,
892 },
893 ));
894 }
895
896 let text_operations = self.text.operations().clone();
897 cx.background_spawn(async move {
898 let since = since.unwrap_or_default();
899 operations.extend(
900 text_operations
901 .iter()
902 .filter(|(_, op)| !since.observed(op.timestamp()))
903 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
904 );
905 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
906 operations
907 })
908 }
909
910 /// Assign a language to the buffer, returning the buffer.
911 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
912 self.set_language(Some(language), cx);
913 self
914 }
915
916 /// Returns the [`Capability`] of this buffer.
917 pub fn capability(&self) -> Capability {
918 self.capability
919 }
920
921 /// Whether this buffer can only be read.
922 pub fn read_only(&self) -> bool {
923 self.capability == Capability::ReadOnly
924 }
925
926 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
927 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
928 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
929 let snapshot = buffer.snapshot();
930 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
931 Self {
932 saved_mtime,
933 saved_version: buffer.version(),
934 preview_version: buffer.version(),
935 reload_task: None,
936 transaction_depth: 0,
937 was_dirty_before_starting_transaction: None,
938 has_unsaved_edits: Cell::new((buffer.version(), false)),
939 text: buffer,
940 branch_state: None,
941 file,
942 capability,
943 syntax_map,
944 reparse: None,
945 non_text_state_update_count: 0,
946 sync_parse_timeout: Duration::from_millis(1),
947 parse_status: watch::channel(ParseStatus::Idle),
948 autoindent_requests: Default::default(),
949 pending_autoindent: Default::default(),
950 language: None,
951 remote_selections: Default::default(),
952 diagnostics: Default::default(),
953 diagnostics_timestamp: Default::default(),
954 completion_triggers: Default::default(),
955 completion_triggers_per_language_server: Default::default(),
956 completion_triggers_timestamp: Default::default(),
957 deferred_ops: OperationQueue::new(),
958 has_conflict: false,
959 pull_diagnostics_result_id: None,
960 change_bits: Default::default(),
961 _subscriptions: Vec::new(),
962 }
963 }
964
965 pub fn build_snapshot(
966 text: Rope,
967 language: Option<Arc<Language>>,
968 language_registry: Option<Arc<LanguageRegistry>>,
969 cx: &mut App,
970 ) -> impl Future<Output = BufferSnapshot> + use<> {
971 let entity_id = cx.reserve_entity::<Self>().entity_id();
972 let buffer_id = entity_id.as_non_zero_u64().into();
973 async move {
974 let text =
975 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
976 let mut syntax = SyntaxMap::new(&text).snapshot();
977 if let Some(language) = language.clone() {
978 let text = text.clone();
979 let language = language.clone();
980 let language_registry = language_registry.clone();
981 syntax.reparse(&text, language_registry, language);
982 }
983 BufferSnapshot {
984 text,
985 syntax,
986 file: None,
987 diagnostics: Default::default(),
988 remote_selections: Default::default(),
989 language,
990 non_text_state_update_count: 0,
991 }
992 }
993 }
994
995 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
996 let entity_id = cx.reserve_entity::<Self>().entity_id();
997 let buffer_id = entity_id.as_non_zero_u64().into();
998 let text =
999 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
1000 let syntax = SyntaxMap::new(&text).snapshot();
1001 BufferSnapshot {
1002 text,
1003 syntax,
1004 file: None,
1005 diagnostics: Default::default(),
1006 remote_selections: Default::default(),
1007 language: None,
1008 non_text_state_update_count: 0,
1009 }
1010 }
1011
1012 #[cfg(any(test, feature = "test-support"))]
1013 pub fn build_snapshot_sync(
1014 text: Rope,
1015 language: Option<Arc<Language>>,
1016 language_registry: Option<Arc<LanguageRegistry>>,
1017 cx: &mut App,
1018 ) -> BufferSnapshot {
1019 let entity_id = cx.reserve_entity::<Self>().entity_id();
1020 let buffer_id = entity_id.as_non_zero_u64().into();
1021 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1022 let mut syntax = SyntaxMap::new(&text).snapshot();
1023 if let Some(language) = language.clone() {
1024 let text = text.clone();
1025 let language = language.clone();
1026 let language_registry = language_registry.clone();
1027 syntax.reparse(&text, language_registry, language);
1028 }
1029 BufferSnapshot {
1030 text,
1031 syntax,
1032 file: None,
1033 diagnostics: Default::default(),
1034 remote_selections: Default::default(),
1035 language,
1036 non_text_state_update_count: 0,
1037 }
1038 }
1039
1040 /// Retrieve a snapshot of the buffer's current state. This is computationally
1041 /// cheap, and allows reading from the buffer on a background thread.
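    ///
    /// For example (illustrative, not a doctest; `buffer` and `cx` are assumed),
    /// a snapshot can be captured and then read from a background task:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read from the snapshot freely without blocking the main thread.
    ///     let last_row = snapshot.max_point().row;
    /// })
    /// .detach();
    /// ```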
1042 pub fn snapshot(&self) -> BufferSnapshot {
1043 let text = self.text.snapshot();
1044 let mut syntax_map = self.syntax_map.lock();
1045 syntax_map.interpolate(&text);
1046 let syntax = syntax_map.snapshot();
1047
1048 BufferSnapshot {
1049 text,
1050 syntax,
1051 file: self.file.clone(),
1052 remote_selections: self.remote_selections.clone(),
1053 diagnostics: self.diagnostics.clone(),
1054 language: self.language.clone(),
1055 non_text_state_update_count: self.non_text_state_update_count,
1056 }
1057 }
1058
1059 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1060 let this = cx.entity();
1061 cx.new(|cx| {
1062 let mut branch = Self {
1063 branch_state: Some(BufferBranchState {
1064 base_buffer: this.clone(),
1065 merged_operations: Default::default(),
1066 }),
1067 language: self.language.clone(),
1068 has_conflict: self.has_conflict,
1069 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1070 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1071 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1072 };
1073 if let Some(language_registry) = self.language_registry() {
1074 branch.set_language_registry(language_registry);
1075 }
1076
1077 // Reparse the branch buffer so that we get syntax highlighting immediately.
1078 branch.reparse(cx);
1079
1080 branch
1081 })
1082 }
1083
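    /// Computes an [`EditPreview`] for the given edits by applying them to a
    /// branch of this buffer's text on a background thread, without modifying
    /// the buffer itself.
    ///
    /// A hedged usage sketch (illustrative, not a doctest; `anchor_range` is an
    /// assumed `Range<Anchor>` within this buffer):
    ///
    /// ```ignore
    /// let edits: Arc<[(Range<Anchor>, String)]> =
    ///     Arc::from([(anchor_range, "replacement".to_string())]);
    /// let preview_task = buffer.read(cx).preview_edits(edits.clone(), cx);
    /// // Later, e.g. in a spawned task: let preview = preview_task.await;
    /// // `preview.highlight_edits(&snapshot, &edits, true, cx)` then yields a HighlightedText.
    /// ```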
1084 pub fn preview_edits(
1085 &self,
1086 edits: Arc<[(Range<Anchor>, String)]>,
1087 cx: &App,
1088 ) -> Task<EditPreview> {
1089 let registry = self.language_registry();
1090 let language = self.language().cloned();
1091 let old_snapshot = self.text.snapshot();
1092 let mut branch_buffer = self.text.branch();
1093 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1094 cx.background_spawn(async move {
1095 if !edits.is_empty() {
1096 if let Some(language) = language.clone() {
1097 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1098 }
1099
1100 branch_buffer.edit(edits.iter().cloned());
1101 let snapshot = branch_buffer.snapshot();
1102 syntax_snapshot.interpolate(&snapshot);
1103
1104 if let Some(language) = language {
1105 syntax_snapshot.reparse(&snapshot, registry, language);
1106 }
1107 }
1108 EditPreview {
1109 old_snapshot,
1110 applied_edits_snapshot: branch_buffer.snapshot(),
1111 syntax_snapshot,
1112 }
1113 })
1114 }
1115
1116 /// Applies all of the changes in this buffer that intersect any of the
1117 /// given `ranges` to its base buffer.
1118 ///
1119 /// If `ranges` is empty, then all changes will be applied. This buffer must
1120 /// be a branch buffer to call this method.
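    ///
    /// A hedged sketch of the branch-and-merge flow (illustrative, not a
    /// doctest; `base` and `cx` are assumed):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |base, cx| base.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prelude\n")], None, cx);
    ///     // An empty range list applies all of the branch's edits to the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```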
1121 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1122 let Some(base_buffer) = self.base_buffer() else {
1123 debug_panic!("not a branch buffer");
1124 return;
1125 };
1126
1127 let mut ranges = if ranges.is_empty() {
1128 &[0..usize::MAX]
1129 } else {
1130 ranges.as_slice()
1131 }
1132 .into_iter()
1133 .peekable();
1134
1135 let mut edits = Vec::new();
1136 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1137 let mut is_included = false;
1138 while let Some(range) = ranges.peek() {
1139 if range.end < edit.new.start {
1140 ranges.next().unwrap();
1141 } else {
1142 if range.start <= edit.new.end {
1143 is_included = true;
1144 }
1145 break;
1146 }
1147 }
1148
1149 if is_included {
1150 edits.push((
1151 edit.old.clone(),
1152 self.text_for_range(edit.new.clone()).collect::<String>(),
1153 ));
1154 }
1155 }
1156
1157 let operation = base_buffer.update(cx, |base_buffer, cx| {
1158 // cx.emit(BufferEvent::DiffBaseChanged);
1159 base_buffer.edit(edits, None, cx)
1160 });
1161
1162 if let Some(operation) = operation {
1163 if let Some(BufferBranchState {
1164 merged_operations, ..
1165 }) = &mut self.branch_state
1166 {
1167 merged_operations.push(operation);
1168 }
1169 }
1170 }
1171
1172 fn on_base_buffer_event(
1173 &mut self,
1174 _: Entity<Buffer>,
1175 event: &BufferEvent,
1176 cx: &mut Context<Self>,
1177 ) {
1178 let BufferEvent::Operation { operation, .. } = event else {
1179 return;
1180 };
1181 let Some(BufferBranchState {
1182 merged_operations, ..
1183 }) = &mut self.branch_state
1184 else {
1185 return;
1186 };
1187
1188 let mut operation_to_undo = None;
1189 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1190 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1191 merged_operations.remove(ix);
1192 operation_to_undo = Some(operation.timestamp);
1193 }
1194 }
1195
1196 self.apply_ops([operation.clone()], cx);
1197
1198 if let Some(timestamp) = operation_to_undo {
1199 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1200 self.undo_operations(counts, cx);
1201 }
1202 }
1203
1204 #[cfg(test)]
1205 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1206 &self.text
1207 }
1208
1209 /// Retrieve a snapshot of the buffer's raw text, without any
1210 /// language-related state like the syntax tree or diagnostics.
1211 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1212 self.text.snapshot()
1213 }
1214
1215 /// The file associated with the buffer, if any.
1216 pub fn file(&self) -> Option<&Arc<dyn File>> {
1217 self.file.as_ref()
1218 }
1219
1220 /// The version of the buffer that was last saved or reloaded from disk.
1221 pub fn saved_version(&self) -> &clock::Global {
1222 &self.saved_version
1223 }
1224
1225 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1226 pub fn saved_mtime(&self) -> Option<MTime> {
1227 self.saved_mtime
1228 }
1229
1230 /// Assign a language to the buffer.
1231 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1232 self.non_text_state_update_count += 1;
1233 self.syntax_map.lock().clear(&self.text);
1234 self.language = language;
1235 self.was_changed();
1236 self.reparse(cx);
1237 cx.emit(BufferEvent::LanguageChanged);
1238 }
1239
1240 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1241 /// other languages if parts of the buffer are written in different languages.
1242 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1243 self.syntax_map
1244 .lock()
1245 .set_language_registry(language_registry);
1246 }
1247
1248 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1249 self.syntax_map.lock().language_registry()
1250 }
1251
1252 /// Assign the buffer a new [`Capability`].
1253 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1254 self.capability = capability;
1255 cx.emit(BufferEvent::CapabilityChanged)
1256 }
1257
1258 /// This method is called to signal that the buffer has been saved.
1259 pub fn did_save(
1260 &mut self,
1261 version: clock::Global,
1262 mtime: Option<MTime>,
1263 cx: &mut Context<Self>,
1264 ) {
1265 self.saved_version = version;
1266 self.has_unsaved_edits
1267 .set((self.saved_version().clone(), false));
1268 self.has_conflict = false;
1269 self.saved_mtime = mtime;
1270 self.was_changed();
1271 cx.emit(BufferEvent::Saved);
1272 cx.notify();
1273 }
1274
1275 /// This method is called to signal that the buffer has been discarded.
1276 pub fn discarded(&self, cx: &mut Context<Self>) {
1277 cx.emit(BufferEvent::Discarded);
1278 cx.notify();
1279 }
1280
1281 /// Reloads the contents of the buffer from disk.
1282 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1283 let (tx, rx) = futures::channel::oneshot::channel();
1284 let prev_version = self.text.version();
1285 self.reload_task = Some(cx.spawn(async move |this, cx| {
1286 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1287 let file = this.file.as_ref()?.as_local()?;
1288
1289 Some((file.disk_state().mtime(), file.load(cx)))
1290 })?
1291 else {
1292 return Ok(());
1293 };
1294
1295 let new_text = new_text.await?;
1296 let diff = this
1297 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1298 .await;
1299 this.update(cx, |this, cx| {
1300 if this.version() == diff.base_version {
1301 this.finalize_last_transaction();
1302 this.apply_diff(diff, cx);
1303 tx.send(this.finalize_last_transaction().cloned()).ok();
1304 this.has_conflict = false;
1305 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1306 } else {
1307 if !diff.edits.is_empty()
1308 || this
1309 .edits_since::<usize>(&diff.base_version)
1310 .next()
1311 .is_some()
1312 {
1313 this.has_conflict = true;
1314 }
1315
1316 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1317 }
1318
1319 this.reload_task.take();
1320 })
1321 }));
1322 rx
1323 }
1324
1325 /// This method is called to signal that the buffer has been reloaded.
1326 pub fn did_reload(
1327 &mut self,
1328 version: clock::Global,
1329 line_ending: LineEnding,
1330 mtime: Option<MTime>,
1331 cx: &mut Context<Self>,
1332 ) {
1333 self.saved_version = version;
1334 self.has_unsaved_edits
1335 .set((self.saved_version.clone(), false));
1336 self.text.set_line_ending(line_ending);
1337 self.saved_mtime = mtime;
1338 cx.emit(BufferEvent::Reloaded);
1339 cx.notify();
1340 }
1341
1342 /// Updates the [`File`] backing this buffer. This should be called when
1343 /// the file has changed or has been deleted.
1344 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1345 let was_dirty = self.is_dirty();
1346 let mut file_changed = false;
1347
1348 if let Some(old_file) = self.file.as_ref() {
1349 if new_file.path() != old_file.path() {
1350 file_changed = true;
1351 }
1352
1353 let old_state = old_file.disk_state();
1354 let new_state = new_file.disk_state();
1355 if old_state != new_state {
1356 file_changed = true;
1357 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1358 cx.emit(BufferEvent::ReloadNeeded)
1359 }
1360 }
1361 } else {
1362 file_changed = true;
1363 };
1364
1365 self.file = Some(new_file);
1366 if file_changed {
1367 self.was_changed();
1368 self.non_text_state_update_count += 1;
1369 if was_dirty != self.is_dirty() {
1370 cx.emit(BufferEvent::DirtyChanged);
1371 }
1372 cx.emit(BufferEvent::FileHandleChanged);
1373 cx.notify();
1374 }
1375 }
1376
1377 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1378 Some(self.branch_state.as_ref()?.base_buffer.clone())
1379 }
1380
1381 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1382 pub fn language(&self) -> Option<&Arc<Language>> {
1383 self.language.as_ref()
1384 }
1385
1386 /// Returns the [`Language`] at the given location.
1387 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1388 let offset = position.to_offset(self);
1389 let mut is_first = true;
1390 let start_anchor = self.anchor_before(offset);
1391 let end_anchor = self.anchor_after(offset);
1392 self.syntax_map
1393 .lock()
1394 .layers_for_range(offset..offset, &self.text, false)
1395 .filter(|layer| {
1396 if is_first {
1397 is_first = false;
1398 return true;
1399 }
1400 let any_sub_ranges_contain_range = layer
1401 .included_sub_ranges
1402 .map(|sub_ranges| {
1403 sub_ranges.iter().any(|sub_range| {
1404 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1405 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1406 !is_before_start && !is_after_end
1407 })
1408 })
1409 .unwrap_or(true);
                any_sub_ranges_contain_range
1412 })
1413 .last()
1414 .map(|info| info.language.clone())
1415 .or_else(|| self.language.clone())
1416 }
1417
1418 /// Returns each [`Language`] for the active syntax layers at the given location.
1419 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1420 let offset = position.to_offset(self);
1421 let mut languages: Vec<Arc<Language>> = self
1422 .syntax_map
1423 .lock()
1424 .layers_for_range(offset..offset, &self.text, false)
1425 .map(|info| info.language.clone())
1426 .collect();
1427
1428 if languages.is_empty() {
1429 if let Some(buffer_language) = self.language() {
1430 languages.push(buffer_language.clone());
1431 }
1432 }
1433
1434 languages
1435 }
1436
1437 /// An integer version number that accounts for all updates besides
1438 /// the buffer's text itself (which is versioned via a version vector).
1439 pub fn non_text_state_update_count(&self) -> usize {
1440 self.non_text_state_update_count
1441 }
1442
1443 /// Whether the buffer is being parsed in the background.
1444 #[cfg(any(test, feature = "test-support"))]
1445 pub fn is_parsing(&self) -> bool {
1446 self.reparse.is_some()
1447 }
1448
1449 /// Indicates whether the buffer contains any regions that may be
1450 /// written in a language that hasn't been loaded yet.
1451 pub fn contains_unknown_injections(&self) -> bool {
1452 self.syntax_map.lock().contains_unknown_injections()
1453 }
1454
1455 #[cfg(test)]
1456 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1457 self.sync_parse_timeout = timeout;
1458 }
1459
1460 /// Called after an edit to synchronize the buffer's main parse tree with
1461 /// the buffer's new underlying state.
1462 ///
1463 /// Locks the syntax map and interpolates the edits since the last reparse
1464 /// into the foreground syntax tree.
1465 ///
1466 /// Then takes a stable snapshot of the syntax map before unlocking it.
1467 /// The snapshot with the interpolated edits is sent to a background thread,
1468 /// where we ask Tree-sitter to perform an incremental parse.
1469 ///
1470 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1471 /// waiting on the parse to complete. As soon as it completes, we proceed
1472 /// synchronously, unless a 1ms timeout elapses.
1473 ///
1474 /// If we time out waiting on the parse, we spawn a second task waiting
1475 /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the newly parsed state in the foreground.
1478 ///
1479 /// If the buffer or grammar changed since the start of the background parse,
1480 /// initiate an additional reparse recursively. To avoid concurrent parses
1481 /// for the same buffer, we only initiate a new parse if we are not already
1482 /// parsing in the background.
1483 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1484 if self.reparse.is_some() {
1485 return;
1486 }
1487 let language = if let Some(language) = self.language.clone() {
1488 language
1489 } else {
1490 return;
1491 };
1492
1493 let text = self.text_snapshot();
1494 let parsed_version = self.version();
1495
1496 let mut syntax_map = self.syntax_map.lock();
1497 syntax_map.interpolate(&text);
1498 let language_registry = syntax_map.language_registry();
1499 let mut syntax_snapshot = syntax_map.snapshot();
1500 drop(syntax_map);
1501
1502 let parse_task = cx.background_spawn({
1503 let language = language.clone();
1504 let language_registry = language_registry.clone();
1505 async move {
1506 syntax_snapshot.reparse(&text, language_registry, language);
1507 syntax_snapshot
1508 }
1509 });
1510
1511 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1512 match cx
1513 .background_executor()
1514 .block_with_timeout(self.sync_parse_timeout, parse_task)
1515 {
1516 Ok(new_syntax_snapshot) => {
1517 self.did_finish_parsing(new_syntax_snapshot, cx);
1518 self.reparse = None;
1519 }
1520 Err(parse_task) => {
1521 self.reparse = Some(cx.spawn(async move |this, cx| {
1522 let new_syntax_map = parse_task.await;
1523 this.update(cx, move |this, cx| {
1524 let grammar_changed =
1525 this.language.as_ref().map_or(true, |current_language| {
1526 !Arc::ptr_eq(&language, current_language)
1527 });
1528 let language_registry_changed = new_syntax_map
1529 .contains_unknown_injections()
1530 && language_registry.map_or(false, |registry| {
1531 registry.version() != new_syntax_map.language_registry_version()
1532 });
1533 let parse_again = language_registry_changed
1534 || grammar_changed
1535 || this.version.changed_since(&parsed_version);
1536 this.did_finish_parsing(new_syntax_map, cx);
1537 this.reparse = None;
1538 if parse_again {
1539 this.reparse(cx);
1540 }
1541 })
1542 .ok();
1543 }));
1544 }
1545 }
1546 }
1547
1548 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1549 self.was_changed();
1550 self.non_text_state_update_count += 1;
1551 self.syntax_map.lock().did_parse(syntax_snapshot);
1552 self.request_autoindent(cx);
1553 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1554 cx.emit(BufferEvent::Reparsed);
1555 cx.notify();
1556 }
1557
1558 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1559 self.parse_status.1.clone()
1560 }
1561
1562 /// Assign to the buffer a set of diagnostics created by a given language server.
1563 pub fn update_diagnostics(
1564 &mut self,
1565 server_id: LanguageServerId,
1566 diagnostics: DiagnosticSet,
1567 cx: &mut Context<Self>,
1568 ) {
1569 let lamport_timestamp = self.text.lamport_clock.tick();
1570 let op = Operation::UpdateDiagnostics {
1571 server_id,
1572 diagnostics: diagnostics.iter().cloned().collect(),
1573 lamport_timestamp,
1574 };
1575 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1576 self.send_operation(op, true, cx);
1577 }
1578
1579 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1580 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1581 return None;
1582 };
1583 Some(&self.diagnostics[idx].1)
1584 }
1585
1586 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1587 if let Some(indent_sizes) = self.compute_autoindents() {
1588 let indent_sizes = cx.background_spawn(indent_sizes);
1589 match cx
1590 .background_executor()
1591 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1592 {
1593 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1594 Err(indent_sizes) => {
1595 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1596 let indent_sizes = indent_sizes.await;
1597 this.update(cx, |this, cx| {
1598 this.apply_autoindents(indent_sizes, cx);
1599 })
1600 .ok();
1601 }));
1602 }
1603 }
1604 } else {
1605 self.autoindent_requests.clear();
1606 }
1607 }
1608
1609 fn compute_autoindents(
1610 &self,
1611 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1612 let max_rows_between_yields = 100;
1613 let snapshot = self.snapshot();
1614 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1615 return None;
1616 }
1617
1618 let autoindent_requests = self.autoindent_requests.clone();
1619 Some(async move {
1620 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1621 for request in autoindent_requests {
1622 // Resolve each edited range to its row in the current buffer and in the
1623 // buffer before this batch of edits.
1624 let mut row_ranges = Vec::new();
1625 let mut old_to_new_rows = BTreeMap::new();
1626 let mut language_indent_sizes_by_new_row = Vec::new();
1627 for entry in &request.entries {
1628 let position = entry.range.start;
1629 let new_row = position.to_point(&snapshot).row;
1630 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1631 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1632
1633 if !entry.first_line_is_new {
1634 let old_row = position.to_point(&request.before_edit).row;
1635 old_to_new_rows.insert(old_row, new_row);
1636 }
1637 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1638 }
1639
1640 // Build a map containing the suggested indentation for each of the edited lines
1641 // with respect to the state of the buffer before these edits. This map is keyed
1642 // by the rows for these lines in the current state of the buffer.
1643 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1644 let old_edited_ranges =
1645 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1646 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1647 let mut language_indent_size = IndentSize::default();
1648 for old_edited_range in old_edited_ranges {
1649 let suggestions = request
1650 .before_edit
1651 .suggest_autoindents(old_edited_range.clone())
1652 .into_iter()
1653 .flatten();
1654 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1655 if let Some(suggestion) = suggestion {
1656 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1657
1658 // Find the indent size based on the language for this row.
1659 while let Some((row, size)) = language_indent_sizes.peek() {
1660 if *row > new_row {
1661 break;
1662 }
1663 language_indent_size = *size;
1664 language_indent_sizes.next();
1665 }
1666
1667 let suggested_indent = old_to_new_rows
1668 .get(&suggestion.basis_row)
1669 .and_then(|from_row| {
1670 Some(old_suggestions.get(from_row).copied()?.0)
1671 })
1672 .unwrap_or_else(|| {
1673 request
1674 .before_edit
1675 .indent_size_for_line(suggestion.basis_row)
1676 })
1677 .with_delta(suggestion.delta, language_indent_size);
1678 old_suggestions
1679 .insert(new_row, (suggested_indent, suggestion.within_error));
1680 }
1681 }
1682 yield_now().await;
1683 }
1684
1685 // Compute new suggestions for each line, but only include them in the result
1686 // if they differ from the old suggestion for that line.
1687 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1688 let mut language_indent_size = IndentSize::default();
1689 for (row_range, original_indent_column) in row_ranges {
1690 let new_edited_row_range = if request.is_block_mode {
1691 row_range.start..row_range.start + 1
1692 } else {
1693 row_range.clone()
1694 };
1695
1696 let suggestions = snapshot
1697 .suggest_autoindents(new_edited_row_range.clone())
1698 .into_iter()
1699 .flatten();
1700 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1701 if let Some(suggestion) = suggestion {
1702 // Find the indent size based on the language for this row.
1703 while let Some((row, size)) = language_indent_sizes.peek() {
1704 if *row > new_row {
1705 break;
1706 }
1707 language_indent_size = *size;
1708 language_indent_sizes.next();
1709 }
1710
1711 let suggested_indent = indent_sizes
1712 .get(&suggestion.basis_row)
1713 .copied()
1714 .map(|e| e.0)
1715 .unwrap_or_else(|| {
1716 snapshot.indent_size_for_line(suggestion.basis_row)
1717 })
1718 .with_delta(suggestion.delta, language_indent_size);
1719
1720 if old_suggestions.get(&new_row).map_or(
1721 true,
1722 |(old_indentation, was_within_error)| {
1723 suggested_indent != *old_indentation
1724 && (!suggestion.within_error || *was_within_error)
1725 },
1726 ) {
1727 indent_sizes.insert(
1728 new_row,
1729 (suggested_indent, request.ignore_empty_lines),
1730 );
1731 }
1732 }
1733 }
1734
1735 if let (true, Some(original_indent_column)) =
1736 (request.is_block_mode, original_indent_column)
1737 {
1738 let new_indent =
1739 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1740 *indent
1741 } else {
1742 snapshot.indent_size_for_line(row_range.start)
1743 };
1744 let delta = new_indent.len as i64 - original_indent_column as i64;
1745 if delta != 0 {
1746 for row in row_range.skip(1) {
1747 indent_sizes.entry(row).or_insert_with(|| {
1748 let mut size = snapshot.indent_size_for_line(row);
1749 if size.kind == new_indent.kind {
1750 match delta.cmp(&0) {
1751 Ordering::Greater => size.len += delta as u32,
1752 Ordering::Less => {
1753 size.len = size.len.saturating_sub(-delta as u32)
1754 }
1755 Ordering::Equal => {}
1756 }
1757 }
1758 (size, request.ignore_empty_lines)
1759 });
1760 }
1761 }
1762 }
1763
1764 yield_now().await;
1765 }
1766 }
1767
1768 indent_sizes
1769 .into_iter()
1770 .filter_map(|(row, (indent, ignore_empty_lines))| {
1771 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1772 None
1773 } else {
1774 Some((row, indent))
1775 }
1776 })
1777 .collect()
1778 })
1779 }
1780
1781 fn apply_autoindents(
1782 &mut self,
1783 indent_sizes: BTreeMap<u32, IndentSize>,
1784 cx: &mut Context<Self>,
1785 ) {
1786 self.autoindent_requests.clear();
1787
1788 let edits: Vec<_> = indent_sizes
1789 .into_iter()
1790 .filter_map(|(row, indent_size)| {
1791 let current_size = indent_size_for_line(self, row);
1792 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1793 })
1794 .collect();
1795
1796 let preserve_preview = self.preserve_preview();
1797 self.edit(edits, None, cx);
1798 if preserve_preview {
1799 self.refresh_preview();
1800 }
1801 }
1802
1803 /// Create a minimal edit that will cause the given row to be indented
1804 /// with the given size. After applying this edit, the length of the line
1805 /// will always be at least `new_size.len`.
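    ///
    /// An illustrative sketch (not compiled as a doctest): growing a two-space
    /// indent to four spaces yields a pure insertion at the start of the row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // Two additional spaces are inserted at the beginning of row 3.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```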
1806 pub fn edit_for_indent_size_adjustment(
1807 row: u32,
1808 current_size: IndentSize,
1809 new_size: IndentSize,
1810 ) -> Option<(Range<Point>, String)> {
1811 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1813 Ordering::Greater => {
1814 let point = Point::new(row, 0);
1815 Some((
1816 point..point,
1817 iter::repeat(new_size.char())
1818 .take((new_size.len - current_size.len) as usize)
1819 .collect::<String>(),
1820 ))
1821 }
1822
1823 Ordering::Less => Some((
1824 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1825 String::new(),
1826 )),
1827
1828 Ordering::Equal => None,
1829 }
1830 } else {
1831 Some((
1832 Point::new(row, 0)..Point::new(row, current_size.len),
1833 iter::repeat(new_size.char())
1834 .take(new_size.len as usize)
1835 .collect::<String>(),
1836 ))
1837 }
1838 }
1839
1840 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1841 /// and the given new text.
1842 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1843 let old_text = self.as_rope().clone();
1844 let base_version = self.version();
1845 cx.background_executor()
1846 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1847 let old_text = old_text.to_string();
1848 let line_ending = LineEnding::detect(&new_text);
1849 LineEnding::normalize(&mut new_text);
1850 let edits = text_diff(&old_text, &new_text);
1851 Diff {
1852 base_version,
1853 line_ending,
1854 edits,
1855 }
1856 })
1857 }
1858
1859 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1861 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1862 let old_text = self.as_rope().clone();
1863 let line_ending = self.line_ending();
1864 let base_version = self.version();
1865 cx.background_spawn(async move {
1866 let ranges = trailing_whitespace_ranges(&old_text);
1867 let empty = Arc::<str>::from("");
1868 Diff {
1869 base_version,
1870 line_ending,
1871 edits: ranges
1872 .into_iter()
1873 .map(|range| (range, empty.clone()))
1874 .collect(),
1875 }
1876 })
1877 }
1878
1879 /// Ensures that the buffer ends with a single newline character, and
1880 /// no other whitespace.
1881 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1882 let len = self.len();
1883 let mut offset = len;
1884 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1885 let non_whitespace_len = chunk
1886 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1887 .len();
1888 offset -= chunk.len();
1889 offset += non_whitespace_len;
1890 if non_whitespace_len != 0 {
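                // The buffer already ends with exactly one trailing newline
                // and no other trailing whitespace, so no edit is needed.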
1891 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1892 return;
1893 }
1894 break;
1895 }
1896 }
1897 self.edit([(offset..len, "\n")], None, cx);
1898 }
1899
1900 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts
    /// of the diff that conflict with those changes are discarded.
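    ///
    /// A sketch of the typical flow (not compiled as a doctest; the entity and
    /// async plumbing needed to await the background task is elided):
    ///
    /// ```ignore
    /// // Compute the diff on a background task...
    /// let diff_task = buffer.diff(new_contents, cx);
    /// let diff = diff_task.await;
    /// // ...then rebase it onto the current contents and apply it.
    /// let transaction_id = buffer.apply_diff(diff, cx);
    /// ```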
1903 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1904 let snapshot = self.snapshot();
1905 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1906 let mut delta = 0;
1907 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1908 while let Some(edit_since) = edits_since.peek() {
1909 // If the edit occurs after a diff hunk, then it does not
1910 // affect that hunk.
1911 if edit_since.old.start > range.end {
1912 break;
1913 }
1914 // If the edit precedes the diff hunk, then adjust the hunk
1915 // to reflect the edit.
1916 else if edit_since.old.end < range.start {
1917 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1918 edits_since.next();
1919 }
1920 // If the edit intersects a diff hunk, then discard that hunk.
1921 else {
1922 return None;
1923 }
1924 }
1925
1926 let start = (range.start as i64 + delta) as usize;
1927 let end = (range.end as i64 + delta) as usize;
1928 Some((start..end, new_text))
1929 });
1930
1931 self.start_transaction();
1932 self.text.set_line_ending(diff.line_ending);
1933 self.edit(adjusted_edits, None, cx);
1934 self.end_transaction(cx)
1935 }
1936
1937 fn has_unsaved_edits(&self) -> bool {
1938 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1939
1940 if last_version == self.version {
1941 self.has_unsaved_edits
1942 .set((last_version, has_unsaved_edits));
1943 return has_unsaved_edits;
1944 }
1945
1946 let has_edits = self.has_edits_since(&self.saved_version);
1947 self.has_unsaved_edits
1948 .set((self.version.clone(), has_edits));
1949 has_edits
1950 }
1951
1952 /// Checks if the buffer has unsaved changes.
1953 pub fn is_dirty(&self) -> bool {
1954 if self.capability == Capability::ReadOnly {
1955 return false;
1956 }
1957 if self.has_conflict {
1958 return true;
1959 }
1960 match self.file.as_ref().map(|f| f.disk_state()) {
1961 Some(DiskState::New) | Some(DiskState::Deleted) => {
1962 !self.is_empty() && self.has_unsaved_edits()
1963 }
1964 _ => self.has_unsaved_edits(),
1965 }
1966 }
1967
1968 /// Checks if the buffer and its file have both changed since the buffer
1969 /// was last saved or reloaded.
1970 pub fn has_conflict(&self) -> bool {
1971 if self.has_conflict {
1972 return true;
1973 }
1974 let Some(file) = self.file.as_ref() else {
1975 return false;
1976 };
1977 match file.disk_state() {
1978 DiskState::New => false,
1979 DiskState::Present { mtime } => match self.saved_mtime {
1980 Some(saved_mtime) => {
1981 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1982 }
1983 None => true,
1984 },
1985 DiskState::Deleted => false,
1986 }
1987 }
1988
1989 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1990 pub fn subscribe(&mut self) -> Subscription {
1991 self.text.subscribe()
1992 }
1993
1994 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1995 ///
1996 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
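    ///
    /// A small illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...later, after some edits have been applied...
    /// if changed.take() {
    ///     // React to the change without waiting for an effect cycle.
    /// }
    /// ```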
1998 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1999 if let Err(ix) = self
2000 .change_bits
2001 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2002 {
2003 self.change_bits.insert(ix, bit);
2004 }
2005 }
2006
2007 fn was_changed(&mut self) {
2008 self.change_bits.retain(|change_bit| {
2009 change_bit.upgrade().map_or(false, |bit| {
2010 bit.replace(true);
2011 true
2012 })
2013 });
2014 }
2015
    /// Starts a transaction, if one is not already in progress. When undoing or
2017 /// redoing edits, all of the edits performed within a transaction are undone
2018 /// or redone together.
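    ///
    /// An illustrative sketch (not compiled as a doctest) of grouping two edits
    /// so that a single undo reverts both:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// let len = buffer.len();
    /// buffer.edit([(len..len, "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```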
2019 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2020 self.start_transaction_at(Instant::now())
2021 }
2022
2023 /// Starts a transaction, providing the current time. Subsequent transactions
2024 /// that occur within a short period of time will be grouped together. This
2025 /// is controlled by the buffer's undo grouping duration.
2026 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2027 self.transaction_depth += 1;
2028 if self.was_dirty_before_starting_transaction.is_none() {
2029 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2030 }
2031 self.text.start_transaction_at(now)
2032 }
2033
2034 /// Terminates the current transaction, if this is the outermost transaction.
2035 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2036 self.end_transaction_at(Instant::now(), cx)
2037 }
2038
2039 /// Terminates the current transaction, providing the current time. Subsequent transactions
2040 /// that occur within a short period of time will be grouped together. This
2041 /// is controlled by the buffer's undo grouping duration.
2042 pub fn end_transaction_at(
2043 &mut self,
2044 now: Instant,
2045 cx: &mut Context<Self>,
2046 ) -> Option<TransactionId> {
2047 assert!(self.transaction_depth > 0);
2048 self.transaction_depth -= 1;
2049 let was_dirty = if self.transaction_depth == 0 {
2050 self.was_dirty_before_starting_transaction.take().unwrap()
2051 } else {
2052 false
2053 };
2054 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2055 self.did_edit(&start_version, was_dirty, cx);
2056 Some(transaction_id)
2057 } else {
2058 None
2059 }
2060 }
2061
2062 /// Manually add a transaction to the buffer's undo history.
2063 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2064 self.text.push_transaction(transaction, now);
2065 }
2066
2067 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2069 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2070 self.text.finalize_last_transaction()
2071 }
2072
2073 /// Manually group all changes since a given transaction.
2074 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2075 self.text.group_until_transaction(transaction_id);
2076 }
2077
    /// Manually remove a transaction from the buffer's undo history.
2079 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2080 self.text.forget_transaction(transaction_id)
2081 }
2082
    /// Retrieve a transaction from the buffer's undo history.
2084 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2085 self.text.get_transaction(transaction_id)
2086 }
2087
2088 /// Manually merge two transactions in the buffer's undo history.
2089 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2090 self.text.merge_transactions(transaction, destination);
2091 }
2092
2093 /// Waits for the buffer to receive operations with the given timestamps.
2094 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2095 &mut self,
2096 edit_ids: It,
2097 ) -> impl Future<Output = Result<()>> + use<It> {
2098 self.text.wait_for_edits(edit_ids)
2099 }
2100
2101 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2102 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2103 &mut self,
2104 anchors: It,
2105 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2106 self.text.wait_for_anchors(anchors)
2107 }
2108
2109 /// Waits for the buffer to receive operations up to the given version.
2110 pub fn wait_for_version(
2111 &mut self,
2112 version: clock::Global,
2113 ) -> impl Future<Output = Result<()>> + use<> {
2114 self.text.wait_for_version(version)
2115 }
2116
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
    /// [`Buffer::wait_for_version`] to resolve with an error.
2119 pub fn give_up_waiting(&mut self) {
2120 self.text.give_up_waiting();
2121 }
2122
2123 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2124 pub fn set_active_selections(
2125 &mut self,
2126 selections: Arc<[Selection<Anchor>]>,
2127 line_mode: bool,
2128 cursor_shape: CursorShape,
2129 cx: &mut Context<Self>,
2130 ) {
2131 let lamport_timestamp = self.text.lamport_clock.tick();
2132 self.remote_selections.insert(
2133 self.text.replica_id(),
2134 SelectionSet {
2135 selections: selections.clone(),
2136 lamport_timestamp,
2137 line_mode,
2138 cursor_shape,
2139 },
2140 );
2141 self.send_operation(
2142 Operation::UpdateSelections {
2143 selections,
2144 line_mode,
2145 lamport_timestamp,
2146 cursor_shape,
2147 },
2148 true,
2149 cx,
2150 );
2151 self.non_text_state_update_count += 1;
2152 cx.notify();
2153 }
2154
2155 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2156 /// this replica.
2157 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2158 if self
2159 .remote_selections
2160 .get(&self.text.replica_id())
2161 .map_or(true, |set| !set.selections.is_empty())
2162 {
2163 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2164 }
2165 }
2166
2167 pub fn set_agent_selections(
2168 &mut self,
2169 selections: Arc<[Selection<Anchor>]>,
2170 line_mode: bool,
2171 cursor_shape: CursorShape,
2172 cx: &mut Context<Self>,
2173 ) {
2174 let lamport_timestamp = self.text.lamport_clock.tick();
2175 self.remote_selections.insert(
2176 AGENT_REPLICA_ID,
2177 SelectionSet {
2178 selections: selections.clone(),
2179 lamport_timestamp,
2180 line_mode,
2181 cursor_shape,
2182 },
2183 );
2184 self.non_text_state_update_count += 1;
2185 cx.notify();
2186 }
2187
2188 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2189 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2190 }
2191
2192 /// Replaces the buffer's entire text.
2193 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2194 where
2195 T: Into<Arc<str>>,
2196 {
2197 self.autoindent_requests.clear();
2198 self.edit([(0..self.len(), text)], None, cx)
2199 }
2200
2201 /// Appends the given text to the end of the buffer.
2202 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2203 where
2204 T: Into<Arc<str>>,
2205 {
2206 self.edit([(self.len()..self.len(), text)], None, cx)
2207 }
2208
2209 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2210 /// delete, and a string of text to insert at that location.
2211 ///
2212 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2213 /// request for the edited ranges, which will be processed when the buffer finishes
2214 /// parsing.
2215 ///
    /// Parsing takes place at the end of a transaction, and may be performed
    /// synchronously or asynchronously, depending on the changes.
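    ///
    /// An illustrative sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Insert a header at the start of the buffer without auto-indenting.
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    ///
    /// // Replace a byte range and re-indent each affected line.
    /// buffer.edit([(10..20, "new_text")], Some(AutoindentMode::EachLine), cx);
    /// ```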
2218 pub fn edit<I, S, T>(
2219 &mut self,
2220 edits_iter: I,
2221 autoindent_mode: Option<AutoindentMode>,
2222 cx: &mut Context<Self>,
2223 ) -> Option<clock::Lamport>
2224 where
2225 I: IntoIterator<Item = (Range<S>, T)>,
2226 S: ToOffset,
2227 T: Into<Arc<str>>,
2228 {
2229 // Skip invalid edits and coalesce contiguous ones.
2230 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2231
2232 for (range, new_text) in edits_iter {
2233 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2234
2235 if range.start > range.end {
2236 mem::swap(&mut range.start, &mut range.end);
2237 }
2238 let new_text = new_text.into();
2239 if !new_text.is_empty() || !range.is_empty() {
2240 if let Some((prev_range, prev_text)) = edits.last_mut() {
2241 if prev_range.end >= range.start {
2242 prev_range.end = cmp::max(prev_range.end, range.end);
2243 *prev_text = format!("{prev_text}{new_text}").into();
2244 } else {
2245 edits.push((range, new_text));
2246 }
2247 } else {
2248 edits.push((range, new_text));
2249 }
2250 }
2251 }
2252 if edits.is_empty() {
2253 return None;
2254 }
2255
2256 self.start_transaction();
2257 self.pending_autoindent.take();
2258 let autoindent_request = autoindent_mode
2259 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2260
2261 let edit_operation = self.text.edit(edits.iter().cloned());
2262 let edit_id = edit_operation.timestamp();
2263
2264 if let Some((before_edit, mode)) = autoindent_request {
2265 let mut delta = 0isize;
2266 let entries = edits
2267 .into_iter()
2268 .enumerate()
2269 .zip(&edit_operation.as_edit().unwrap().new_text)
2270 .map(|((ix, (range, _)), new_text)| {
2271 let new_text_length = new_text.len();
2272 let old_start = range.start.to_point(&before_edit);
2273 let new_start = (delta + range.start as isize) as usize;
2274 let range_len = range.end - range.start;
2275 delta += new_text_length as isize - range_len as isize;
2276
2277 // Decide what range of the insertion to auto-indent, and whether
2278 // the first line of the insertion should be considered a newly-inserted line
2279 // or an edit to an existing line.
2280 let mut range_of_insertion_to_indent = 0..new_text_length;
2281 let mut first_line_is_new = true;
2282
2283 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2284 let old_line_end = before_edit.line_len(old_start.row);
2285
2286 if old_start.column > old_line_start {
2287 first_line_is_new = false;
2288 }
2289
2290 if !new_text.contains('\n')
2291 && (old_start.column + (range_len as u32) < old_line_end
2292 || old_line_end == old_line_start)
2293 {
2294 first_line_is_new = false;
2295 }
2296
2297 // When inserting text starting with a newline, avoid auto-indenting the
2298 // previous line.
2299 if new_text.starts_with('\n') {
2300 range_of_insertion_to_indent.start += 1;
2301 first_line_is_new = true;
2302 }
2303
2304 let mut original_indent_column = None;
2305 if let AutoindentMode::Block {
2306 original_indent_columns,
2307 } = &mode
2308 {
2309 original_indent_column = Some(if new_text.starts_with('\n') {
2310 indent_size_for_text(
2311 new_text[range_of_insertion_to_indent.clone()].chars(),
2312 )
2313 .len
2314 } else {
2315 original_indent_columns
2316 .get(ix)
2317 .copied()
2318 .flatten()
2319 .unwrap_or_else(|| {
2320 indent_size_for_text(
2321 new_text[range_of_insertion_to_indent.clone()].chars(),
2322 )
2323 .len
2324 })
2325 });
2326
2327 // Avoid auto-indenting the line after the edit.
2328 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2329 range_of_insertion_to_indent.end -= 1;
2330 }
2331 }
2332
2333 AutoindentRequestEntry {
2334 first_line_is_new,
2335 original_indent_column,
2336 indent_size: before_edit.language_indent_size_at(range.start, cx),
2337 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2338 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2339 }
2340 })
2341 .collect();
2342
2343 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2344 before_edit,
2345 entries,
2346 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2347 ignore_empty_lines: false,
2348 }));
2349 }
2350
2351 self.end_transaction(cx);
2352 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2353 Some(edit_id)
2354 }
2355
2356 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2357 self.was_changed();
2358
2359 if self.edits_since::<usize>(old_version).next().is_none() {
2360 return;
2361 }
2362
2363 self.reparse(cx);
2364 cx.emit(BufferEvent::Edited);
2365 if was_dirty != self.is_dirty() {
2366 cx.emit(BufferEvent::DirtyChanged);
2367 }
2368 cx.notify();
2369 }
2370
2371 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2372 where
2373 I: IntoIterator<Item = Range<T>>,
2374 T: ToOffset + Copy,
2375 {
2376 let before_edit = self.snapshot();
2377 let entries = ranges
2378 .into_iter()
2379 .map(|range| AutoindentRequestEntry {
2380 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2381 first_line_is_new: true,
2382 indent_size: before_edit.language_indent_size_at(range.start, cx),
2383 original_indent_column: None,
2384 })
2385 .collect();
2386 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2387 before_edit,
2388 entries,
2389 is_block_mode: false,
2390 ignore_empty_lines: true,
2391 }));
2392 self.request_autoindent(cx);
2393 }
2394
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2397 pub fn insert_empty_line(
2398 &mut self,
2399 position: impl ToPoint,
2400 space_above: bool,
2401 space_below: bool,
2402 cx: &mut Context<Self>,
2403 ) -> Point {
2404 let mut position = position.to_point(self);
2405
2406 self.start_transaction();
2407
2408 self.edit(
2409 [(position..position, "\n")],
2410 Some(AutoindentMode::EachLine),
2411 cx,
2412 );
2413
2414 if position.column > 0 {
2415 position += Point::new(1, 0);
2416 }
2417
2418 if !self.is_line_blank(position.row) {
2419 self.edit(
2420 [(position..position, "\n")],
2421 Some(AutoindentMode::EachLine),
2422 cx,
2423 );
2424 }
2425
2426 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2427 self.edit(
2428 [(position..position, "\n")],
2429 Some(AutoindentMode::EachLine),
2430 cx,
2431 );
2432 position.row += 1;
2433 }
2434
2435 if space_below
2436 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2437 {
2438 self.edit(
2439 [(position..position, "\n")],
2440 Some(AutoindentMode::EachLine),
2441 cx,
2442 );
2443 }
2444
2445 self.end_transaction(cx);
2446
2447 position
2448 }
2449
2450 /// Applies the given remote operations to the buffer.
2451 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2452 self.pending_autoindent.take();
2453 let was_dirty = self.is_dirty();
2454 let old_version = self.version.clone();
2455 let mut deferred_ops = Vec::new();
2456 let buffer_ops = ops
2457 .into_iter()
2458 .filter_map(|op| match op {
2459 Operation::Buffer(op) => Some(op),
2460 _ => {
2461 if self.can_apply_op(&op) {
2462 self.apply_op(op, cx);
2463 } else {
2464 deferred_ops.push(op);
2465 }
2466 None
2467 }
2468 })
2469 .collect::<Vec<_>>();
2470 for operation in buffer_ops.iter() {
2471 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2472 }
2473 self.text.apply_ops(buffer_ops);
2474 self.deferred_ops.insert(deferred_ops);
2475 self.flush_deferred_ops(cx);
2476 self.did_edit(&old_version, was_dirty, cx);
2477 // Notify independently of whether the buffer was edited as the operations could include a
2478 // selection update.
2479 cx.notify();
2480 }
2481
2482 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2483 let mut deferred_ops = Vec::new();
2484 for op in self.deferred_ops.drain().iter().cloned() {
2485 if self.can_apply_op(&op) {
2486 self.apply_op(op, cx);
2487 } else {
2488 deferred_ops.push(op);
2489 }
2490 }
2491 self.deferred_ops.insert(deferred_ops);
2492 }
2493
2494 pub fn has_deferred_ops(&self) -> bool {
2495 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2496 }
2497
2498 fn can_apply_op(&self, operation: &Operation) -> bool {
2499 match operation {
2500 Operation::Buffer(_) => {
2501 unreachable!("buffer operations should never be applied at this layer")
2502 }
2503 Operation::UpdateDiagnostics {
2504 diagnostics: diagnostic_set,
2505 ..
2506 } => diagnostic_set.iter().all(|diagnostic| {
2507 self.text.can_resolve(&diagnostic.range.start)
2508 && self.text.can_resolve(&diagnostic.range.end)
2509 }),
2510 Operation::UpdateSelections { selections, .. } => selections
2511 .iter()
2512 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2513 Operation::UpdateCompletionTriggers { .. } => true,
2514 }
2515 }
2516
2517 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2518 match operation {
2519 Operation::Buffer(_) => {
2520 unreachable!("buffer operations should never be applied at this layer")
2521 }
2522 Operation::UpdateDiagnostics {
2523 server_id,
2524 diagnostics: diagnostic_set,
2525 lamport_timestamp,
2526 } => {
2527 let snapshot = self.snapshot();
2528 self.apply_diagnostic_update(
2529 server_id,
2530 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2531 lamport_timestamp,
2532 cx,
2533 );
2534 }
2535 Operation::UpdateSelections {
2536 selections,
2537 lamport_timestamp,
2538 line_mode,
2539 cursor_shape,
2540 } => {
2541 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2542 if set.lamport_timestamp > lamport_timestamp {
2543 return;
2544 }
2545 }
2546
2547 self.remote_selections.insert(
2548 lamport_timestamp.replica_id,
2549 SelectionSet {
2550 selections,
2551 lamport_timestamp,
2552 line_mode,
2553 cursor_shape,
2554 },
2555 );
2556 self.text.lamport_clock.observe(lamport_timestamp);
2557 self.non_text_state_update_count += 1;
2558 }
2559 Operation::UpdateCompletionTriggers {
2560 triggers,
2561 lamport_timestamp,
2562 server_id,
2563 } => {
2564 if triggers.is_empty() {
2565 self.completion_triggers_per_language_server
2566 .remove(&server_id);
2567 self.completion_triggers = self
2568 .completion_triggers_per_language_server
2569 .values()
2570 .flat_map(|triggers| triggers.into_iter().cloned())
2571 .collect();
2572 } else {
2573 self.completion_triggers_per_language_server
2574 .insert(server_id, triggers.iter().cloned().collect());
2575 self.completion_triggers.extend(triggers);
2576 }
2577 self.text.lamport_clock.observe(lamport_timestamp);
2578 }
2579 }
2580 }
2581
2582 fn apply_diagnostic_update(
2583 &mut self,
2584 server_id: LanguageServerId,
2585 diagnostics: DiagnosticSet,
2586 lamport_timestamp: clock::Lamport,
2587 cx: &mut Context<Self>,
2588 ) {
2589 if lamport_timestamp > self.diagnostics_timestamp {
2590 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2591 if diagnostics.is_empty() {
2592 if let Ok(ix) = ix {
2593 self.diagnostics.remove(ix);
2594 }
2595 } else {
2596 match ix {
2597 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2598 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2599 };
2600 }
2601 self.diagnostics_timestamp = lamport_timestamp;
2602 self.non_text_state_update_count += 1;
2603 self.text.lamport_clock.observe(lamport_timestamp);
2604 cx.notify();
2605 cx.emit(BufferEvent::DiagnosticsUpdated);
2606 }
2607 }
2608
2609 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2610 self.was_changed();
2611 cx.emit(BufferEvent::Operation {
2612 operation,
2613 is_local,
2614 });
2615 }
2616
2617 /// Removes the selections for a given peer.
2618 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2619 self.remote_selections.remove(&replica_id);
2620 cx.notify();
2621 }
2622
2623 /// Undoes the most recent transaction.
2624 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2625 let was_dirty = self.is_dirty();
2626 let old_version = self.version.clone();
2627
2628 if let Some((transaction_id, operation)) = self.text.undo() {
2629 self.send_operation(Operation::Buffer(operation), true, cx);
2630 self.did_edit(&old_version, was_dirty, cx);
2631 Some(transaction_id)
2632 } else {
2633 None
2634 }
2635 }
2636
2637 /// Manually undoes a specific transaction in the buffer's undo history.
2638 pub fn undo_transaction(
2639 &mut self,
2640 transaction_id: TransactionId,
2641 cx: &mut Context<Self>,
2642 ) -> bool {
2643 let was_dirty = self.is_dirty();
2644 let old_version = self.version.clone();
2645 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 self.did_edit(&old_version, was_dirty, cx);
2648 true
2649 } else {
2650 false
2651 }
2652 }
2653
2654 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2655 pub fn undo_to_transaction(
2656 &mut self,
2657 transaction_id: TransactionId,
2658 cx: &mut Context<Self>,
2659 ) -> bool {
2660 let was_dirty = self.is_dirty();
2661 let old_version = self.version.clone();
2662
2663 let operations = self.text.undo_to_transaction(transaction_id);
2664 let undone = !operations.is_empty();
2665 for operation in operations {
2666 self.send_operation(Operation::Buffer(operation), true, cx);
2667 }
2668 if undone {
2669 self.did_edit(&old_version, was_dirty, cx)
2670 }
2671 undone
2672 }
2673
2674 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2675 let was_dirty = self.is_dirty();
2676 let operation = self.text.undo_operations(counts);
2677 let old_version = self.version.clone();
2678 self.send_operation(Operation::Buffer(operation), true, cx);
2679 self.did_edit(&old_version, was_dirty, cx);
2680 }
2681
    /// Redoes the most recently undone transaction.
2683 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2684 let was_dirty = self.is_dirty();
2685 let old_version = self.version.clone();
2686
2687 if let Some((transaction_id, operation)) = self.text.redo() {
2688 self.send_operation(Operation::Buffer(operation), true, cx);
2689 self.did_edit(&old_version, was_dirty, cx);
2690 Some(transaction_id)
2691 } else {
2692 None
2693 }
2694 }
2695
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2697 pub fn redo_to_transaction(
2698 &mut self,
2699 transaction_id: TransactionId,
2700 cx: &mut Context<Self>,
2701 ) -> bool {
2702 let was_dirty = self.is_dirty();
2703 let old_version = self.version.clone();
2704
2705 let operations = self.text.redo_to_transaction(transaction_id);
2706 let redone = !operations.is_empty();
2707 for operation in operations {
2708 self.send_operation(Operation::Buffer(operation), true, cx);
2709 }
2710 if redone {
2711 self.did_edit(&old_version, was_dirty, cx)
2712 }
2713 redone
2714 }
2715
2716 /// Override current completion triggers with the user-provided completion triggers.
2717 pub fn set_completion_triggers(
2718 &mut self,
2719 server_id: LanguageServerId,
2720 triggers: BTreeSet<String>,
2721 cx: &mut Context<Self>,
2722 ) {
2723 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2724 if triggers.is_empty() {
2725 self.completion_triggers_per_language_server
2726 .remove(&server_id);
2727 self.completion_triggers = self
2728 .completion_triggers_per_language_server
2729 .values()
2730 .flat_map(|triggers| triggers.into_iter().cloned())
2731 .collect();
2732 } else {
2733 self.completion_triggers_per_language_server
2734 .insert(server_id, triggers.clone());
2735 self.completion_triggers.extend(triggers.iter().cloned());
2736 }
2737 self.send_operation(
2738 Operation::UpdateCompletionTriggers {
2739 triggers: triggers.into_iter().collect(),
2740 lamport_timestamp: self.completion_triggers_timestamp,
2741 server_id,
2742 },
2743 true,
2744 cx,
2745 );
2746 cx.notify();
2747 }
2748
2749 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2751 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2752 &self.completion_triggers
2753 }
2754
2755 /// Call this directly after performing edits to prevent the preview tab
2756 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2757 /// to return false until there are additional edits.
2758 pub fn refresh_preview(&mut self) {
2759 self.preview_version = self.version.clone();
2760 }
2761
2762 /// Whether we should preserve the preview status of a tab containing this buffer.
2763 pub fn preserve_preview(&self) -> bool {
2764 !self.has_edits_since(&self.preview_version)
2765 }
2766
2767 pub fn result_id(&self) -> Option<String> {
2768 self.pull_diagnostics_result_id.clone()
2769 }
2770
2771 pub fn set_result_id(&mut self, result_id: Option<String>) {
2772 self.pull_diagnostics_result_id = result_id;
2773 }
2774}
2775
2776#[doc(hidden)]
2777#[cfg(any(test, feature = "test-support"))]
2778impl Buffer {
2779 pub fn edit_via_marked_text(
2780 &mut self,
2781 marked_string: &str,
2782 autoindent_mode: Option<AutoindentMode>,
2783 cx: &mut Context<Self>,
2784 ) {
2785 let edits = self.edits_for_marked_text(marked_string);
2786 self.edit(edits, autoindent_mode, cx);
2787 }
2788
2789 pub fn set_group_interval(&mut self, group_interval: Duration) {
2790 self.text.set_group_interval(group_interval);
2791 }
2792
2793 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2794 where
2795 T: rand::Rng,
2796 {
2797 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2798 let mut last_end = None;
2799 for _ in 0..old_range_count {
2800 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2801 break;
2802 }
2803
2804 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2805 let mut range = self.random_byte_range(new_start, rng);
2806 if rng.gen_bool(0.2) {
2807 mem::swap(&mut range.start, &mut range.end);
2808 }
2809 last_end = Some(range.end);
2810
2811 let new_text_len = rng.gen_range(0..10);
2812 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2813 new_text = new_text.to_uppercase();
2814
2815 edits.push((range, new_text));
2816 }
2817 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2818 self.edit(edits, None, cx);
2819 }
2820
2821 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2822 let was_dirty = self.is_dirty();
2823 let old_version = self.version.clone();
2824
2825 let ops = self.text.randomly_undo_redo(rng);
2826 if !ops.is_empty() {
2827 for op in ops {
2828 self.send_operation(Operation::Buffer(op), true, cx);
2829 self.did_edit(&old_version, was_dirty, cx);
2830 }
2831 }
2832 }
2833}
2834
2835impl EventEmitter<BufferEvent> for Buffer {}
2836
2837impl Deref for Buffer {
2838 type Target = TextBuffer;
2839
2840 fn deref(&self) -> &Self::Target {
2841 &self.text
2842 }
2843}
2844
2845impl BufferSnapshot {
2846 /// Returns [`IndentSize`] for a given line that respects user settings and
2847 /// language preferences.
2848 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2849 indent_size_for_line(self, row)
2850 }
2851
2852 /// Returns [`IndentSize`] for a given position that respects user settings
2853 /// and language preferences.
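    ///
    /// A brief sketch (not compiled as a doctest), assuming the resolved settings
    /// use four-space soft tabs and `cursor_offset` is any position in the buffer:
    ///
    /// ```ignore
    /// let indent = snapshot.language_indent_size_at(cursor_offset, cx);
    /// assert_eq!(indent, IndentSize::spaces(4));
    /// ```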
2854 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2855 let settings = language_settings(
2856 self.language_at(position).map(|l| l.name()),
2857 self.file(),
2858 cx,
2859 );
2860 if settings.hard_tabs {
2861 IndentSize::tab()
2862 } else {
2863 IndentSize::spaces(settings.tab_size.get())
2864 }
2865 }
2866
2867 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2868 /// is passed in as `single_indent_size`.
2869 pub fn suggested_indents(
2870 &self,
2871 rows: impl Iterator<Item = u32>,
2872 single_indent_size: IndentSize,
2873 ) -> BTreeMap<u32, IndentSize> {
2874 let mut result = BTreeMap::new();
2875
2876 for row_range in contiguous_ranges(rows, 10) {
2877 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2878 Some(suggestions) => suggestions,
2879 _ => break,
2880 };
2881
2882 for (row, suggestion) in row_range.zip(suggestions) {
2883 let indent_size = if let Some(suggestion) = suggestion {
2884 result
2885 .get(&suggestion.basis_row)
2886 .copied()
2887 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2888 .with_delta(suggestion.delta, single_indent_size)
2889 } else {
2890 self.indent_size_for_line(row)
2891 };
2892
2893 result.insert(row, indent_size);
2894 }
2895 }
2896
2897 result
2898 }
2899
2900 fn suggest_autoindents(
2901 &self,
2902 row_range: Range<u32>,
2903 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2904 let config = &self.language.as_ref()?.config;
2905 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2906 let significant_indentation = config.significant_indentation;
2907
2908 // Find the suggested indentation ranges based on the syntax tree.
2909 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2910 let end = Point::new(row_range.end, 0);
2911 let range = (start..end).to_offset(&self.text);
2912 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2913 Some(&grammar.indents_config.as_ref()?.query)
2914 });
2915 let indent_configs = matches
2916 .grammars()
2917 .iter()
2918 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2919 .collect::<Vec<_>>();
2920
2921 let mut indent_ranges = Vec::<Range<Point>>::new();
2922 let mut outdent_positions = Vec::<Point>::new();
2923 while let Some(mat) = matches.peek() {
2924 let mut start: Option<Point> = None;
2925 let mut end: Option<Point> = None;
2926 let mut outdent: Option<Point> = None;
2927
2928 let config = &indent_configs[mat.grammar_index];
2929 for capture in mat.captures {
2930 if capture.index == config.indent_capture_ix {
2931 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2932 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2933 } else if Some(capture.index) == config.start_capture_ix {
2934 start = Some(Point::from_ts_point(capture.node.end_position()));
2935 } else if Some(capture.index) == config.end_capture_ix {
2936 end = Some(Point::from_ts_point(capture.node.start_position()));
2937 } else if Some(capture.index) == config.outdent_capture_ix {
2938 let point = Point::from_ts_point(capture.node.start_position());
2939 outdent.get_or_insert(point);
2940 outdent_positions.push(point);
2941 }
2942 }
2943
2944 matches.advance();
            // In the case of significant indentation, expand the end to the outdent position.
2946 let end = if significant_indentation {
2947 outdent.or(end)
2948 } else {
2949 end
2950 };
2951 if let Some((start, end)) = start.zip(end) {
2952 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2953 continue;
2954 }
2955 let range = start..end;
2956 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2957 Err(ix) => indent_ranges.insert(ix, range),
2958 Ok(ix) => {
2959 let prev_range = &mut indent_ranges[ix];
2960 prev_range.end = prev_range.end.max(range.end);
2961 }
2962 }
2963 }
2964 }
2965
2966 let mut error_ranges = Vec::<Range<Point>>::new();
2967 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2968 grammar.error_query.as_ref()
2969 });
2970 while let Some(mat) = matches.peek() {
2971 let node = mat.captures[0].node;
2972 let start = Point::from_ts_point(node.start_position());
2973 let end = Point::from_ts_point(node.end_position());
2974 let range = start..end;
2975 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2976 Ok(ix) | Err(ix) => ix,
2977 };
2978 let mut end_ix = ix;
2979 while let Some(existing_range) = error_ranges.get(end_ix) {
2980 if existing_range.end < end {
2981 end_ix += 1;
2982 } else {
2983 break;
2984 }
2985 }
2986 error_ranges.splice(ix..end_ix, [range]);
2987 matches.advance();
2988 }
2989
        // We don't use outdent positions to truncate in the case of significant
        // indentation; rather, we use them to expand the ranges (handled above).
2992 if !significant_indentation {
2993 outdent_positions.sort();
2994 for outdent_position in outdent_positions {
2995 // find the innermost indent range containing this outdent_position
2996 // set its end to the outdent position
2997 if let Some(range_to_truncate) = indent_ranges
2998 .iter_mut()
2999 .filter(|indent_range| indent_range.contains(&outdent_position))
3000 .next_back()
3001 {
3002 range_to_truncate.end = outdent_position;
3003 }
3004 }
3005 }
3006
        // Find the suggested indentation increases and decreases based on regexes.
3008 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3009 self.for_each_line(
3010 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3011 ..Point::new(row_range.end, 0),
3012 |row, line| {
3013 if config
3014 .decrease_indent_pattern
3015 .as_ref()
3016 .map_or(false, |regex| regex.is_match(line))
3017 {
3018 indent_change_rows.push((row, Ordering::Less));
3019 }
3020 if config
3021 .increase_indent_pattern
3022 .as_ref()
3023 .map_or(false, |regex| regex.is_match(line))
3024 {
3025 indent_change_rows.push((row + 1, Ordering::Greater));
3026 }
3027 },
3028 );
3029
3030 let mut indent_changes = indent_change_rows.into_iter().peekable();
3031 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3032 prev_non_blank_row.unwrap_or(0)
3033 } else {
3034 row_range.start.saturating_sub(1)
3035 };
3036 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3037 Some(row_range.map(move |row| {
3038 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3039
3040 let mut indent_from_prev_row = false;
3041 let mut outdent_from_prev_row = false;
3042 let mut outdent_to_row = u32::MAX;
3043 let mut from_regex = false;
3044
3045 while let Some((indent_row, delta)) = indent_changes.peek() {
3046 match indent_row.cmp(&row) {
3047 Ordering::Equal => match delta {
3048 Ordering::Less => {
3049 from_regex = true;
3050 outdent_from_prev_row = true
3051 }
3052 Ordering::Greater => {
3053 indent_from_prev_row = true;
3054 from_regex = true
3055 }
3056 _ => {}
3057 },
3058
3059 Ordering::Greater => break,
3060 Ordering::Less => {}
3061 }
3062
3063 indent_changes.next();
3064 }
3065
3066 for range in &indent_ranges {
3067 if range.start.row >= row {
3068 break;
3069 }
3070 if range.start.row == prev_row && range.end > row_start {
3071 indent_from_prev_row = true;
3072 }
3073 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3074 {
3075 indent_from_prev_row = true;
3076 }
3077 if !significant_indentation || !self.is_line_blank(row) {
3078 if range.end > prev_row_start && range.end <= row_start {
3079 outdent_to_row = outdent_to_row.min(range.start.row);
3080 }
3081 }
3082 }
3083
3084 let within_error = error_ranges
3085 .iter()
3086 .any(|e| e.start.row < row && e.end > row_start);
3087
3088 let suggestion = if outdent_to_row == prev_row
3089 || (outdent_from_prev_row && indent_from_prev_row)
3090 {
3091 Some(IndentSuggestion {
3092 basis_row: prev_row,
3093 delta: Ordering::Equal,
3094 within_error: within_error && !from_regex,
3095 })
3096 } else if indent_from_prev_row {
3097 Some(IndentSuggestion {
3098 basis_row: prev_row,
3099 delta: Ordering::Greater,
3100 within_error: within_error && !from_regex,
3101 })
3102 } else if outdent_to_row < prev_row {
3103 Some(IndentSuggestion {
3104 basis_row: outdent_to_row,
3105 delta: Ordering::Equal,
3106 within_error: within_error && !from_regex,
3107 })
3108 } else if outdent_from_prev_row {
3109 Some(IndentSuggestion {
3110 basis_row: prev_row,
3111 delta: Ordering::Less,
3112 within_error: within_error && !from_regex,
3113 })
3114 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3115 {
3116 Some(IndentSuggestion {
3117 basis_row: prev_row,
3118 delta: Ordering::Equal,
3119 within_error: within_error && !from_regex,
3120 })
3121 } else {
3122 None
3123 };
3124
3125 prev_row = row;
3126 prev_row_start = row_start;
3127 suggestion
3128 }))
3129 }
3130
3131 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3132 while row > 0 {
3133 row -= 1;
3134 if !self.is_line_blank(row) {
3135 return Some(row);
3136 }
3137 }
3138 None
3139 }
3140
3141 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3142 let captures = self.syntax.captures(range, &self.text, |grammar| {
3143 grammar.highlights_query.as_ref()
3144 });
3145 let highlight_maps = captures
3146 .grammars()
3147 .iter()
3148 .map(|grammar| grammar.highlight_map())
3149 .collect();
3150 (captures, highlight_maps)
3151 }
3152
3153 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3154 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3155 /// returned in chunks where each chunk has a single syntax highlighting style and
3156 /// diagnostic status.
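    ///
    /// An illustrative sketch (not compiled as a doctest), assuming each
    /// yielded chunk exposes its text via a `text` field:
    ///
    /// ```ignore
    /// let mut rendered = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     // Each chunk carries a single highlight and diagnostic status;
    ///     // here we only concatenate the raw text.
    ///     rendered.push_str(chunk.text);
    /// }
    /// ```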
3157 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3158 let range = range.start.to_offset(self)..range.end.to_offset(self);
3159
3160 let mut syntax = None;
3161 if language_aware {
3162 syntax = Some(self.get_highlights(range.clone()));
3163 }
3164 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3165 let diagnostics = language_aware;
3166 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3167 }
3168
3169 pub fn highlighted_text_for_range<T: ToOffset>(
3170 &self,
3171 range: Range<T>,
3172 override_style: Option<HighlightStyle>,
3173 syntax_theme: &SyntaxTheme,
3174 ) -> HighlightedText {
3175 HighlightedText::from_buffer_range(
3176 range,
3177 &self.text,
3178 &self.syntax,
3179 override_style,
3180 syntax_theme,
3181 )
3182 }
3183
3184 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3186 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3187 let mut line = String::new();
3188 let mut row = range.start.row;
3189 for chunk in self
3190 .as_rope()
3191 .chunks_in_range(range.to_offset(self))
3192 .chain(["\n"])
3193 {
3194 for (newline_ix, text) in chunk.split('\n').enumerate() {
3195 if newline_ix > 0 {
3196 callback(row, &line);
3197 row += 1;
3198 line.clear();
3199 }
3200 line.push_str(text);
3201 }
3202 }
3203 }
3204
3205 /// Iterates over every [`SyntaxLayer`] in the buffer.
3206 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3207 self.syntax
3208 .layers_for_range(0..self.len(), &self.text, true)
3209 }
3210
3211 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3212 let offset = position.to_offset(self);
3213 self.syntax
3214 .layers_for_range(offset..offset, &self.text, false)
3215 .filter(|l| l.node().end_byte() > offset)
3216 .last()
3217 }
3218
3219 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3220 &self,
3221 range: Range<D>,
3222 ) -> Option<SyntaxLayer> {
3223 let range = range.to_offset(self);
3224 return self
3225 .syntax
3226 .layers_for_range(range, &self.text, false)
3227 .max_by(|a, b| {
3228 if a.depth != b.depth {
3229 a.depth.cmp(&b.depth)
3230 } else if a.offset.0 != b.offset.0 {
3231 a.offset.0.cmp(&b.offset.0)
3232 } else {
3233 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3234 }
3235 });
3236 }
3237
3238 /// Returns the main [`Language`].
3239 pub fn language(&self) -> Option<&Arc<Language>> {
3240 self.language.as_ref()
3241 }
3242
3243 /// Returns the [`Language`] at the given location.
3244 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3245 self.syntax_layer_at(position)
3246 .map(|info| info.language)
3247 .or(self.language.as_ref())
3248 }
3249
3250 /// Returns the settings for the language at the given location.
3251 pub fn settings_at<'a, D: ToOffset>(
3252 &'a self,
3253 position: D,
3254 cx: &'a App,
3255 ) -> Cow<'a, LanguageSettings> {
3256 language_settings(
3257 self.language_at(position).map(|l| l.name()),
3258 self.file.as_ref(),
3259 cx,
3260 )
3261 }
3262
3263 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3264 CharClassifier::new(self.language_scope_at(point))
3265 }
3266
3267 /// Returns the [`LanguageScope`] at the given location.
3268 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3269 let offset = position.to_offset(self);
3270 let mut scope = None;
3271 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3272
3273 // Use the layer that has the smallest node intersecting the given point.
3274 for layer in self
3275 .syntax
3276 .layers_for_range(offset..offset, &self.text, false)
3277 {
3278 let mut cursor = layer.node().walk();
3279
3280 let mut range = None;
3281 loop {
3282 let child_range = cursor.node().byte_range();
3283 if !child_range.contains(&offset) {
3284 break;
3285 }
3286
3287 range = Some(child_range);
3288 if cursor.goto_first_child_for_byte(offset).is_none() {
3289 break;
3290 }
3291 }
3292
3293 if let Some(range) = range {
3294 if smallest_range_and_depth.as_ref().map_or(
3295 true,
3296 |(smallest_range, smallest_range_depth)| {
3297 if layer.depth > *smallest_range_depth {
3298 true
3299 } else if layer.depth == *smallest_range_depth {
3300 range.len() < smallest_range.len()
3301 } else {
3302 false
3303 }
3304 },
3305 ) {
3306 smallest_range_and_depth = Some((range, layer.depth));
3307 scope = Some(LanguageScope {
3308 language: layer.language.clone(),
3309 override_id: layer.override_id(offset, &self.text),
3310 });
3311 }
3312 }
3313 }
3314
3315 scope.or_else(|| {
3316 self.language.clone().map(|language| LanguageScope {
3317 language,
3318 override_id: None,
3319 })
3320 })
3321 }
3322
3323 /// Returns a tuple of the range and character kind of the word
3324 /// surrounding the given position.
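    ///
    /// An illustrative sketch (not compiled as a doctest); `CharKind::Word` is
    /// assumed to be the classifier's kind for identifier characters:
    ///
    /// ```ignore
    /// // With the buffer containing "let foo = 1;" and an offset inside "foo",
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// // the range expands to the bytes covering "foo".
    /// assert_eq!(range, 4..7);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```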
3325 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3326 let mut start = start.to_offset(self);
3327 let mut end = start;
3328 let mut next_chars = self.chars_at(start).take(128).peekable();
3329 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3330
3331 let classifier = self.char_classifier_at(start);
3332 let word_kind = cmp::max(
3333 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3334 next_chars.peek().copied().map(|c| classifier.kind(c)),
3335 );
3336
3337 for ch in prev_chars {
3338 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3339 start -= ch.len_utf8();
3340 } else {
3341 break;
3342 }
3343 }
3344
3345 for ch in next_chars {
3346 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3347 end += ch.len_utf8();
3348 } else {
3349 break;
3350 }
3351 }
3352
3353 (start..end, word_kind)
3354 }
3355
3356 /// Returns the closest syntax node enclosing the given range.
3357 pub fn syntax_ancestor<'a, T: ToOffset>(
3358 &'a self,
3359 range: Range<T>,
3360 ) -> Option<tree_sitter::Node<'a>> {
3361 let range = range.start.to_offset(self)..range.end.to_offset(self);
3362 let mut result: Option<tree_sitter::Node<'a>> = None;
3363 'outer: for layer in self
3364 .syntax
3365 .layers_for_range(range.clone(), &self.text, true)
3366 {
3367 let mut cursor = layer.node().walk();
3368
3369 // Descend to the first leaf that touches the start of the range.
3370 //
3371 // If the range is non-empty and the current node ends exactly at the start,
3372 // move to the next sibling to find a node that extends beyond the start.
3373 //
3374 // If the range is empty and the current node starts after the range position,
3375 // move to the previous sibling to find the node that contains the position.
3376 while cursor.goto_first_child_for_byte(range.start).is_some() {
3377 if !range.is_empty() && cursor.node().end_byte() == range.start {
3378 cursor.goto_next_sibling();
3379 }
3380 if range.is_empty() && cursor.node().start_byte() > range.start {
3381 cursor.goto_previous_sibling();
3382 }
3383 }
3384
3385 // Ascend to the smallest ancestor that strictly contains the range.
3386 loop {
3387 let node_range = cursor.node().byte_range();
3388 if node_range.start <= range.start
3389 && node_range.end >= range.end
3390 && node_range.len() > range.len()
3391 {
3392 break;
3393 }
3394 if !cursor.goto_parent() {
3395 continue 'outer;
3396 }
3397 }
3398
3399 let left_node = cursor.node();
3400 let mut layer_result = left_node;
3401
3402 // For an empty range, try to find another node immediately to the right of the range.
3403 if left_node.end_byte() == range.start {
3404 let mut right_node = None;
3405 while !cursor.goto_next_sibling() {
3406 if !cursor.goto_parent() {
3407 break;
3408 }
3409 }
3410
3411 while cursor.node().start_byte() == range.start {
3412 right_node = Some(cursor.node());
3413 if !cursor.goto_first_child() {
3414 break;
3415 }
3416 }
3417
3418 // If there is a candidate node on both sides of the (empty) range, then
3419 // decide between the two by favoring a named node over an anonymous token.
3420 // If both nodes are the same in that regard, favor the right one.
3421 if let Some(right_node) = right_node {
3422 if right_node.is_named() || !left_node.is_named() {
3423 layer_result = right_node;
3424 }
3425 }
3426 }
3427
3428 if let Some(previous_result) = &result {
3429 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3430 continue;
3431 }
3432 }
3433 result = Some(layer_result);
3434 }
3435
3436 result
3437 }
3438
    /// Returns the root syntax node within the given row.
3440 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3441 let start_offset = position.to_offset(self);
3442
3443 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3444
3445 let layer = self
3446 .syntax
3447 .layers_for_range(start_offset..start_offset, &self.text, true)
3448 .next()?;
3449
3450 let mut cursor = layer.node().walk();
3451
3452 // Descend to the first leaf that touches the start of the range.
3453 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3454 if cursor.node().end_byte() == start_offset {
3455 cursor.goto_next_sibling();
3456 }
3457 }
3458
3459 // Ascend to the root node within the same row.
3460 while cursor.goto_parent() {
3461 if cursor.node().start_position().row != row {
3462 break;
3463 }
3464 }
3465
3466 return Some(cursor.node());
3467 }
3468
3469 /// Returns the outline for the buffer.
3470 ///
3471 /// This method allows passing an optional [`SyntaxTheme`] to
3472 /// syntax-highlight the returned symbols.
3473 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3474 self.outline_items_containing(0..self.len(), true, theme)
3475 .map(Outline::new)
3476 }
3477
3478 /// Returns all the symbols that contain the given position.
3479 ///
3480 /// This method allows passing an optional [`SyntaxTheme`] to
3481 /// syntax-highlight the returned symbols.
3482 pub fn symbols_containing<T: ToOffset>(
3483 &self,
3484 position: T,
3485 theme: Option<&SyntaxTheme>,
3486 ) -> Option<Vec<OutlineItem<Anchor>>> {
3487 let position = position.to_offset(self);
3488 let mut items = self.outline_items_containing(
3489 position.saturating_sub(1)..self.len().min(position + 1),
3490 false,
3491 theme,
3492 )?;
3493 let mut prev_depth = None;
3494 items.retain(|item| {
3495 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3496 prev_depth = Some(item.depth);
3497 result
3498 });
3499 Some(items)
3500 }
3501
3502 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3503 let range = range.to_offset(self);
3504 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3505 grammar.outline_config.as_ref().map(|c| &c.query)
3506 });
3507 let configs = matches
3508 .grammars()
3509 .iter()
3510 .map(|g| g.outline_config.as_ref().unwrap())
3511 .collect::<Vec<_>>();
3512
3513 while let Some(mat) = matches.peek() {
3514 let config = &configs[mat.grammar_index];
3515 let containing_item_node = maybe!({
3516 let item_node = mat.captures.iter().find_map(|cap| {
3517 if cap.index == config.item_capture_ix {
3518 Some(cap.node)
3519 } else {
3520 None
3521 }
3522 })?;
3523
3524 let item_byte_range = item_node.byte_range();
3525 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3526 None
3527 } else {
3528 Some(item_node)
3529 }
3530 });
3531
3532 if let Some(item_node) = containing_item_node {
3533 return Some(
3534 Point::from_ts_point(item_node.start_position())
3535 ..Point::from_ts_point(item_node.end_position()),
3536 );
3537 }
3538
3539 matches.advance();
3540 }
3541 None
3542 }
3543
3544 pub fn outline_items_containing<T: ToOffset>(
3545 &self,
3546 range: Range<T>,
3547 include_extra_context: bool,
3548 theme: Option<&SyntaxTheme>,
3549 ) -> Option<Vec<OutlineItem<Anchor>>> {
3550 let range = range.to_offset(self);
3551 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3552 grammar.outline_config.as_ref().map(|c| &c.query)
3553 });
3554 let configs = matches
3555 .grammars()
3556 .iter()
3557 .map(|g| g.outline_config.as_ref().unwrap())
3558 .collect::<Vec<_>>();
3559
3560 let mut items = Vec::new();
3561 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3562 while let Some(mat) = matches.peek() {
3563 let config = &configs[mat.grammar_index];
3564 if let Some(item) =
3565 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3566 {
3567 items.push(item);
3568 } else if let Some(capture) = mat
3569 .captures
3570 .iter()
3571 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3572 {
3573 let capture_range = capture.node.start_position()..capture.node.end_position();
3574 let mut capture_row_range =
3575 capture_range.start.row as u32..capture_range.end.row as u32;
3576 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3577 {
3578 capture_row_range.end -= 1;
3579 }
3580 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3581 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3582 last_row_range.end = capture_row_range.end;
3583 } else {
3584 annotation_row_ranges.push(capture_row_range);
3585 }
3586 } else {
3587 annotation_row_ranges.push(capture_row_range);
3588 }
3589 }
3590 matches.advance();
3591 }
3592
3593 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3594
3595 // Assign depths based on containment relationships and convert to anchors.
3596 let mut item_ends_stack = Vec::<Point>::new();
3597 let mut anchor_items = Vec::new();
3598 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3599 for item in items {
3600 while let Some(last_end) = item_ends_stack.last().copied() {
3601 if last_end < item.range.end {
3602 item_ends_stack.pop();
3603 } else {
3604 break;
3605 }
3606 }
3607
3608 let mut annotation_row_range = None;
3609 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3610 let row_preceding_item = item.range.start.row.saturating_sub(1);
3611 if next_annotation_row_range.end < row_preceding_item {
3612 annotation_row_ranges.next();
3613 } else {
3614 if next_annotation_row_range.end == row_preceding_item {
3615 annotation_row_range = Some(next_annotation_row_range.clone());
3616 annotation_row_ranges.next();
3617 }
3618 break;
3619 }
3620 }
3621
3622 anchor_items.push(OutlineItem {
3623 depth: item_ends_stack.len(),
3624 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3625 text: item.text,
3626 highlight_ranges: item.highlight_ranges,
3627 name_ranges: item.name_ranges,
3628 body_range: item.body_range.map(|body_range| {
3629 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3630 }),
3631 annotation_range: annotation_row_range.map(|annotation_range| {
3632 self.anchor_after(Point::new(annotation_range.start, 0))
3633 ..self.anchor_before(Point::new(
3634 annotation_range.end,
3635 self.line_len(annotation_range.end),
3636 ))
3637 }),
3638 });
3639 item_ends_stack.push(item.range.end);
3640 }
3641
3642 Some(anchor_items)
3643 }
3644
3645 fn next_outline_item(
3646 &self,
3647 config: &OutlineConfig,
3648 mat: &SyntaxMapMatch,
3649 range: &Range<usize>,
3650 include_extra_context: bool,
3651 theme: Option<&SyntaxTheme>,
3652 ) -> Option<OutlineItem<Point>> {
3653 let item_node = mat.captures.iter().find_map(|cap| {
3654 if cap.index == config.item_capture_ix {
3655 Some(cap.node)
3656 } else {
3657 None
3658 }
3659 })?;
3660
3661 let item_byte_range = item_node.byte_range();
3662 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3663 return None;
3664 }
3665 let item_point_range = Point::from_ts_point(item_node.start_position())
3666 ..Point::from_ts_point(item_node.end_position());
3667
3668 let mut open_point = None;
3669 let mut close_point = None;
3670 let mut buffer_ranges = Vec::new();
3671 for capture in mat.captures {
3672 let node_is_name;
3673 if capture.index == config.name_capture_ix {
3674 node_is_name = true;
3675 } else if Some(capture.index) == config.context_capture_ix
3676 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3677 {
3678 node_is_name = false;
3679 } else {
3680 if Some(capture.index) == config.open_capture_ix {
3681 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3682 } else if Some(capture.index) == config.close_capture_ix {
3683 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3684 }
3685
3686 continue;
3687 }
3688
3689 let mut range = capture.node.start_byte()..capture.node.end_byte();
3690 let start = capture.node.start_position();
3691 if capture.node.end_position().row > start.row {
3692 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3693 }
3694
3695 if !range.is_empty() {
3696 buffer_ranges.push((range, node_is_name));
3697 }
3698 }
3699 if buffer_ranges.is_empty() {
3700 return None;
3701 }
3702 let mut text = String::new();
3703 let mut highlight_ranges = Vec::new();
3704 let mut name_ranges = Vec::new();
3705 let mut chunks = self.chunks(
3706 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3707 true,
3708 );
3709 let mut last_buffer_range_end = 0;
3710
3711 for (buffer_range, is_name) in buffer_ranges {
3712 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3713 if space_added {
3714 text.push(' ');
3715 }
3716 let before_append_len = text.len();
3717 let mut offset = buffer_range.start;
3718 chunks.seek(buffer_range.clone());
3719 for mut chunk in chunks.by_ref() {
3720 if chunk.text.len() > buffer_range.end - offset {
3721 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3722 offset = buffer_range.end;
3723 } else {
3724 offset += chunk.text.len();
3725 }
3726 let style = chunk
3727 .syntax_highlight_id
3728 .zip(theme)
3729 .and_then(|(highlight, theme)| highlight.style(theme));
3730 if let Some(style) = style {
3731 let start = text.len();
3732 let end = start + chunk.text.len();
3733 highlight_ranges.push((start..end, style));
3734 }
3735 text.push_str(chunk.text);
3736 if offset >= buffer_range.end {
3737 break;
3738 }
3739 }
3740 if is_name {
3741 let after_append_len = text.len();
3742 let start = if space_added && !name_ranges.is_empty() {
3743 before_append_len - 1
3744 } else {
3745 before_append_len
3746 };
3747 name_ranges.push(start..after_append_len);
3748 }
3749 last_buffer_range_end = buffer_range.end;
3750 }
3751
3752 Some(OutlineItem {
3753 depth: 0, // We'll calculate the depth later
3754 range: item_point_range,
3755 text,
3756 highlight_ranges,
3757 name_ranges,
3758 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3759 annotation_range: None,
3760 })
3761 }
3762
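    /// Returns the offset ranges of function bodies (the [`TextObject::InsideFunction`]
    /// captures) that lie within the given range, e.g. for folding them.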
3763 pub fn function_body_fold_ranges<T: ToOffset>(
3764 &self,
3765 within: Range<T>,
3766 ) -> impl Iterator<Item = Range<usize>> + '_ {
3767 self.text_object_ranges(within, TreeSitterOptions::default())
3768 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3769 }
3770
    /// For each grammar in the language, runs the provided
    /// [`tree_sitter::Query`] against the given range.
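    ///
    /// A minimal usage sketch (illustrative), mirroring how the bracket query is run below:
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.brackets_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```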
3773 pub fn matches(
3774 &self,
3775 range: Range<usize>,
3776 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3777 ) -> SyntaxMapMatches {
3778 self.syntax.matches(range, self, query)
3779 }
3780
3781 pub fn all_bracket_ranges(
3782 &self,
3783 range: Range<usize>,
3784 ) -> impl Iterator<Item = BracketMatch> + '_ {
3785 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3786 grammar.brackets_config.as_ref().map(|c| &c.query)
3787 });
3788 let configs = matches
3789 .grammars()
3790 .iter()
3791 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3792 .collect::<Vec<_>>();
3793
3794 iter::from_fn(move || {
3795 while let Some(mat) = matches.peek() {
3796 let mut open = None;
3797 let mut close = None;
3798 let config = &configs[mat.grammar_index];
3799 let pattern = &config.patterns[mat.pattern_index];
3800 for capture in mat.captures {
3801 if capture.index == config.open_capture_ix {
3802 open = Some(capture.node.byte_range());
3803 } else if capture.index == config.close_capture_ix {
3804 close = Some(capture.node.byte_range());
3805 }
3806 }
3807
3808 matches.advance();
3809
3810 let Some((open_range, close_range)) = open.zip(close) else {
3811 continue;
3812 };
3813
3814 let bracket_range = open_range.start..=close_range.end;
3815 if !bracket_range.overlaps(&range) {
3816 continue;
3817 }
3818
3819 return Some(BracketMatch {
3820 open_range,
3821 close_range,
3822 newline_only: pattern.newline_only,
3823 });
3824 }
3825 None
3826 })
3827 }
3828
    /// Returns bracket range pairs overlapping or adjacent to `range`.
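    ///
    /// A minimal usage sketch (illustrative; `snapshot` and an offset `range` are assumed):
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(range.clone()) {
    ///     let open: String = snapshot.text_for_range(pair.open_range.clone()).collect();
    ///     let close: String = snapshot.text_for_range(pair.close_range.clone()).collect();
    ///     println!("{open} ... {close}");
    /// }
    /// ```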
3830 pub fn bracket_ranges<T: ToOffset>(
3831 &self,
3832 range: Range<T>,
3833 ) -> impl Iterator<Item = BracketMatch> + '_ {
3834 // Find bracket pairs that *inclusively* contain the given range.
3835 let range = range.start.to_offset(self).saturating_sub(1)
3836 ..self.len().min(range.end.to_offset(self) + 1);
3837 self.all_bracket_ranges(range)
3838 .filter(|pair| !pair.newline_only)
3839 }
3840
3841 pub fn text_object_ranges<T: ToOffset>(
3842 &self,
3843 range: Range<T>,
3844 options: TreeSitterOptions,
3845 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3846 let range = range.start.to_offset(self).saturating_sub(1)
3847 ..self.len().min(range.end.to_offset(self) + 1);
3848
3849 let mut matches =
3850 self.syntax
3851 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3852 grammar.text_object_config.as_ref().map(|c| &c.query)
3853 });
3854
3855 let configs = matches
3856 .grammars()
3857 .iter()
3858 .map(|grammar| grammar.text_object_config.as_ref())
3859 .collect::<Vec<_>>();
3860
3861 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3862
3863 iter::from_fn(move || {
3864 loop {
3865 while let Some(capture) = captures.pop() {
3866 if capture.0.overlaps(&range) {
3867 return Some(capture);
3868 }
3869 }
3870
3871 let mat = matches.peek()?;
3872
3873 let Some(config) = configs[mat.grammar_index].as_ref() else {
3874 matches.advance();
3875 continue;
3876 };
3877
3878 for capture in mat.captures {
3879 let Some(ix) = config
3880 .text_objects_by_capture_ix
3881 .binary_search_by_key(&capture.index, |e| e.0)
3882 .ok()
3883 else {
3884 continue;
3885 };
3886 let text_object = config.text_objects_by_capture_ix[ix].1;
3887 let byte_range = capture.node.byte_range();
3888
3889 let mut found = false;
3890 for (range, existing) in captures.iter_mut() {
3891 if existing == &text_object {
3892 range.start = range.start.min(byte_range.start);
3893 range.end = range.end.max(byte_range.end);
3894 found = true;
3895 break;
3896 }
3897 }
3898
3899 if !found {
3900 captures.push((byte_range, text_object));
3901 }
3902 }
3903
3904 matches.advance();
3905 }
3906 })
3907 }
3908
3909 /// Returns enclosing bracket ranges containing the given range
3910 pub fn enclosing_bracket_ranges<T: ToOffset>(
3911 &self,
3912 range: Range<T>,
3913 ) -> impl Iterator<Item = BracketMatch> + '_ {
3914 let range = range.start.to_offset(self)..range.end.to_offset(self);
3915
3916 self.bracket_ranges(range.clone()).filter(move |pair| {
3917 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3918 })
3919 }
3920
    /// Returns the innermost pair of bracket ranges enclosing the given range, or `None`
    /// if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be provided to restrict which bracket pairs are considered.
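    ///
    /// A minimal usage sketch (illustrative; the filter keeps only single-character brackets):
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(range, Some(&filter)) {
    ///     // `open` and `close` are the offset ranges of the innermost surrounding pair.
    /// }
    /// ```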
3924 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3925 &self,
3926 range: Range<T>,
3927 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3928 ) -> Option<(Range<usize>, Range<usize>)> {
3929 let range = range.start.to_offset(self)..range.end.to_offset(self);
3930
3931 // Get the ranges of the innermost pair of brackets.
3932 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3933
3934 for pair in self.enclosing_bracket_ranges(range.clone()) {
3935 if let Some(range_filter) = range_filter {
3936 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3937 continue;
3938 }
3939 }
3940
3941 let len = pair.close_range.end - pair.open_range.start;
3942
3943 if let Some((existing_open, existing_close)) = &result {
3944 let existing_len = existing_close.end - existing_open.start;
3945 if len > existing_len {
3946 continue;
3947 }
3948 }
3949
3950 result = Some((pair.open_range, pair.close_range));
3951 }
3952
3953 result
3954 }
3955
    /// Returns offset ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
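    ///
    /// A minimal usage sketch (illustrative), e.g. for hiding secrets before displaying a buffer:
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```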
3959 pub fn redacted_ranges<T: ToOffset>(
3960 &self,
3961 range: Range<T>,
3962 ) -> impl Iterator<Item = Range<usize>> + '_ {
3963 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3964 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3965 grammar
3966 .redactions_config
3967 .as_ref()
3968 .map(|config| &config.query)
3969 });
3970
3971 let configs = syntax_matches
3972 .grammars()
3973 .iter()
3974 .map(|grammar| grammar.redactions_config.as_ref())
3975 .collect::<Vec<_>>();
3976
3977 iter::from_fn(move || {
3978 let redacted_range = syntax_matches
3979 .peek()
3980 .and_then(|mat| {
3981 configs[mat.grammar_index].and_then(|config| {
3982 mat.captures
3983 .iter()
3984 .find(|capture| capture.index == config.redaction_capture_ix)
3985 })
3986 })
3987 .map(|mat| mat.node.byte_range());
3988 syntax_matches.advance();
3989 redacted_range
3990 })
3991 }
3992
3993 pub fn injections_intersecting_range<T: ToOffset>(
3994 &self,
3995 range: Range<T>,
3996 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3997 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3998
3999 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4000 grammar
4001 .injection_config
4002 .as_ref()
4003 .map(|config| &config.query)
4004 });
4005
4006 let configs = syntax_matches
4007 .grammars()
4008 .iter()
4009 .map(|grammar| grammar.injection_config.as_ref())
4010 .collect::<Vec<_>>();
4011
4012 iter::from_fn(move || {
4013 let ranges = syntax_matches.peek().and_then(|mat| {
4014 let config = &configs[mat.grammar_index]?;
4015 let content_capture_range = mat.captures.iter().find_map(|capture| {
4016 if capture.index == config.content_capture_ix {
4017 Some(capture.node.byte_range())
4018 } else {
4019 None
4020 }
4021 })?;
4022 let language = self.language_at(content_capture_range.start)?;
4023 Some((content_capture_range, language))
4024 });
4025 syntax_matches.advance();
4026 ranges
4027 })
4028 }
4029
4030 pub fn runnable_ranges(
4031 &self,
4032 offset_range: Range<usize>,
4033 ) -> impl Iterator<Item = RunnableRange> + '_ {
4034 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4035 grammar.runnable_config.as_ref().map(|config| &config.query)
4036 });
4037
4038 let test_configs = syntax_matches
4039 .grammars()
4040 .iter()
4041 .map(|grammar| grammar.runnable_config.as_ref())
4042 .collect::<Vec<_>>();
4043
4044 iter::from_fn(move || {
4045 loop {
4046 let mat = syntax_matches.peek()?;
4047
4048 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4049 let mut run_range = None;
4050 let full_range = mat.captures.iter().fold(
4051 Range {
4052 start: usize::MAX,
4053 end: 0,
4054 },
4055 |mut acc, next| {
4056 let byte_range = next.node.byte_range();
4057 if acc.start > byte_range.start {
4058 acc.start = byte_range.start;
4059 }
4060 if acc.end < byte_range.end {
4061 acc.end = byte_range.end;
4062 }
4063 acc
4064 },
4065 );
4066 if full_range.start > full_range.end {
4067 // We did not find a full spanning range of this match.
4068 return None;
4069 }
4070 let extra_captures: SmallVec<[_; 1]> =
4071 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4072 test_configs
4073 .extra_captures
4074 .get(capture.index as usize)
4075 .cloned()
4076 .and_then(|tag_name| match tag_name {
4077 RunnableCapture::Named(name) => {
4078 Some((capture.node.byte_range(), name))
4079 }
4080 RunnableCapture::Run => {
4081 let _ = run_range.insert(capture.node.byte_range());
4082 None
4083 }
4084 })
4085 }));
4086 let run_range = run_range?;
4087 let tags = test_configs
4088 .query
4089 .property_settings(mat.pattern_index)
4090 .iter()
4091 .filter_map(|property| {
4092 if *property.key == *"tag" {
4093 property
4094 .value
4095 .as_ref()
4096 .map(|value| RunnableTag(value.to_string().into()))
4097 } else {
4098 None
4099 }
4100 })
4101 .collect();
4102 let extra_captures = extra_captures
4103 .into_iter()
4104 .map(|(range, name)| {
4105 (
4106 name.to_string(),
4107 self.text_for_range(range.clone()).collect::<String>(),
4108 )
4109 })
4110 .collect();
4111 // All tags should have the same range.
4112 Some(RunnableRange {
4113 run_range,
4114 full_range,
4115 runnable: Runnable {
4116 tags,
4117 language: mat.language,
4118 buffer: self.remote_id(),
4119 },
4120 extra_captures,
4121 buffer_id: self.remote_id(),
4122 })
4123 });
4124
4125 syntax_matches.advance();
4126 if test_range.is_some() {
                    // Short-circuiting when `.peek()?` returns None is fine, but a match without a
                    // run marker should not end this iterator; in that case we loop around and try
                    // the next match.
4129 return test_range;
4130 }
4131 }
4132 })
4133 }
4134
    /// Returns the selections of each peer that intersect the given range,
    /// optionally including the local replica's own selections.
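    ///
    /// A minimal usage sketch (illustrative; `range` is an assumed `Range<Anchor>`):
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range.clone(), false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `&Selection<Anchor>` belonging to `replica_id`.
    ///     }
    /// }
    /// ```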
4136 #[allow(clippy::type_complexity)]
4137 pub fn selections_in_range(
4138 &self,
4139 range: Range<Anchor>,
4140 include_local: bool,
4141 ) -> impl Iterator<
4142 Item = (
4143 ReplicaId,
4144 bool,
4145 CursorShape,
4146 impl Iterator<Item = &Selection<Anchor>> + '_,
4147 ),
4148 > + '_ {
4149 self.remote_selections
4150 .iter()
4151 .filter(move |(replica_id, set)| {
4152 (include_local || **replica_id != self.text.replica_id())
4153 && !set.selections.is_empty()
4154 })
4155 .map(move |(replica_id, set)| {
4156 let start_ix = match set.selections.binary_search_by(|probe| {
4157 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4158 }) {
4159 Ok(ix) | Err(ix) => ix,
4160 };
4161 let end_ix = match set.selections.binary_search_by(|probe| {
4162 probe.start.cmp(&range.end, self).then(Ordering::Less)
4163 }) {
4164 Ok(ix) | Err(ix) => ix,
4165 };
4166
4167 (
4168 *replica_id,
4169 set.line_mode,
4170 set.cursor_shape,
4171 set.selections[start_ix..end_ix].iter(),
4172 )
4173 })
4174 }
4175
    /// Returns whether the buffer contains any diagnostics.
4177 pub fn has_diagnostics(&self) -> bool {
4178 !self.diagnostics.is_empty()
4179 }
4180
    /// Returns all the diagnostics intersecting the given range.
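    ///
    /// A minimal usage sketch (illustrative), resolving the diagnostic ranges to offsets:
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?} at {:?}: {}", entry.diagnostic.severity, entry.range, entry.diagnostic.message);
    /// }
    /// ```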
4182 pub fn diagnostics_in_range<'a, T, O>(
4183 &'a self,
4184 search_range: Range<T>,
4185 reversed: bool,
4186 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4187 where
4188 T: 'a + Clone + ToOffset,
4189 O: 'a + FromAnchor,
4190 {
4191 let mut iterators: Vec<_> = self
4192 .diagnostics
4193 .iter()
4194 .map(|(_, collection)| {
4195 collection
4196 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4197 .peekable()
4198 })
4199 .collect();
4200
4201 std::iter::from_fn(move || {
4202 let (next_ix, _) = iterators
4203 .iter_mut()
4204 .enumerate()
4205 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4206 .min_by(|(_, a), (_, b)| {
4207 let cmp = a
4208 .range
4209 .start
4210 .cmp(&b.range.start, self)
4211 // when range is equal, sort by diagnostic severity
4212 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4213 // and stabilize order with group_id
4214 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4215 if reversed { cmp.reverse() } else { cmp }
4216 })?;
4217 iterators[next_ix]
4218 .next()
4219 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4220 diagnostic,
4221 range: FromAnchor::from_anchor(&range.start, self)
4222 ..FromAnchor::from_anchor(&range.end, self),
4223 })
4224 })
4225 }
4226
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
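    ///
    /// A minimal usage sketch (illustrative), printing each group's primary entry:
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     let primary = &group.entries[group.primary_ix];
    ///     println!("{server_id:?}: {}", primary.diagnostic.message);
    /// }
    /// ```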
4230 pub fn diagnostic_groups(
4231 &self,
4232 language_server_id: Option<LanguageServerId>,
4233 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4234 let mut groups = Vec::new();
4235
4236 if let Some(language_server_id) = language_server_id {
4237 if let Ok(ix) = self
4238 .diagnostics
4239 .binary_search_by_key(&language_server_id, |e| e.0)
4240 {
4241 self.diagnostics[ix]
4242 .1
4243 .groups(language_server_id, &mut groups, self);
4244 }
4245 } else {
4246 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4247 diagnostics.groups(*language_server_id, &mut groups, self);
4248 }
4249 }
4250
4251 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4252 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4253 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4254 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4255 });
4256
4257 groups
4258 }
4259
4260 /// Returns an iterator over the diagnostics for the given group.
4261 pub fn diagnostic_group<O>(
4262 &self,
4263 group_id: usize,
4264 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4265 where
4266 O: FromAnchor + 'static,
4267 {
4268 self.diagnostics
4269 .iter()
4270 .flat_map(move |(_, set)| set.group(group_id, self))
4271 }
4272
4273 /// An integer version number that accounts for all updates besides
4274 /// the buffer's text itself (which is versioned via a version vector).
4275 pub fn non_text_state_update_count(&self) -> usize {
4276 self.non_text_state_update_count
4277 }
4278
    /// Returns a snapshot of the underlying file.
4280 pub fn file(&self) -> Option<&Arc<dyn File>> {
4281 self.file.as_ref()
4282 }
4283
4284 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4285 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4286 if let Some(file) = self.file() {
4287 if file.path().file_name().is_none() || include_root {
4288 Some(file.full_path(cx))
4289 } else {
4290 Some(file.path().to_path_buf())
4291 }
4292 } else {
4293 None
4294 }
4295 }
4296
4297 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4298 let query_str = query.fuzzy_contents;
4299 if query_str.map_or(false, |query| query.is_empty()) {
4300 return BTreeMap::default();
4301 }
4302
4303 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4304 language,
4305 override_id: None,
4306 }));
4307
4308 let mut query_ix = 0;
4309 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4310 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4311
4312 let mut words = BTreeMap::default();
4313 let mut current_word_start_ix = None;
4314 let mut chunk_ix = query.range.start;
4315 for chunk in self.chunks(query.range, false) {
4316 for (i, c) in chunk.text.char_indices() {
4317 let ix = chunk_ix + i;
4318 if classifier.is_word(c) {
4319 if current_word_start_ix.is_none() {
4320 current_word_start_ix = Some(ix);
4321 }
4322
4323 if let Some(query_chars) = &query_chars {
4324 if query_ix < query_len {
4325 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4326 query_ix += 1;
4327 }
4328 }
4329 }
4330 continue;
4331 } else if let Some(word_start) = current_word_start_ix.take() {
4332 if query_ix == query_len {
4333 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4334 let mut word_text = self.text_for_range(word_start..ix).peekable();
4335 let first_char = word_text
4336 .peek()
4337 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip
                        // "words" that start with a digit.
4339 if !query.skip_digits
4340 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4341 {
4342 words.insert(word_text.collect(), word_range);
4343 }
4344 }
4345 }
4346 query_ix = 0;
4347 }
4348 chunk_ix += chunk.text.len();
4349 }
4350
4351 words
4352 }
4353}
4354
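/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A minimal usage sketch (illustrative; `snapshot` is an assumed `BufferSnapshot`):
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```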
4355pub struct WordsQuery<'a> {
    /// When set, only returns words that contain every character of this string, in order.
4357 pub fuzzy_contents: Option<&'a str>,
4358 /// Skips words that start with a digit.
4359 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4361 pub range: Range<usize>,
4362}
4363
4364fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4365 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4366}
4367
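/// Determines the indentation of a run of leading whitespace. A sketch of the behavior:
/// `indent_size_for_text("\t\tlet x = 1;".chars())` yields `IndentSize { len: 2, kind: IndentKind::Tab }`.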
4368fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4369 let mut result = IndentSize::spaces(0);
4370 for c in text {
4371 let kind = match c {
4372 ' ' => IndentKind::Space,
4373 '\t' => IndentKind::Tab,
4374 _ => break,
4375 };
4376 if result.len == 0 {
4377 result.kind = kind;
4378 }
4379 result.len += 1;
4380 }
4381 result
4382}
4383
4384impl Clone for BufferSnapshot {
4385 fn clone(&self) -> Self {
4386 Self {
4387 text: self.text.clone(),
4388 syntax: self.syntax.clone(),
4389 file: self.file.clone(),
4390 remote_selections: self.remote_selections.clone(),
4391 diagnostics: self.diagnostics.clone(),
4392 language: self.language.clone(),
4393 non_text_state_update_count: self.non_text_state_update_count,
4394 }
4395 }
4396}
4397
4398impl Deref for BufferSnapshot {
4399 type Target = text::BufferSnapshot;
4400
4401 fn deref(&self) -> &Self::Target {
4402 &self.text
4403 }
4404}
4405
4406unsafe impl Send for BufferChunks<'_> {}
4407
4408impl<'a> BufferChunks<'a> {
4409 pub(crate) fn new(
4410 text: &'a Rope,
4411 range: Range<usize>,
4412 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4413 diagnostics: bool,
4414 buffer_snapshot: Option<&'a BufferSnapshot>,
4415 ) -> Self {
4416 let mut highlights = None;
4417 if let Some((captures, highlight_maps)) = syntax {
4418 highlights = Some(BufferChunkHighlights {
4419 captures,
4420 next_capture: None,
4421 stack: Default::default(),
4422 highlight_maps,
4423 })
4424 }
4425
4426 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4427 let chunks = text.chunks_in_range(range.clone());
4428
4429 let mut this = BufferChunks {
4430 range,
4431 buffer_snapshot,
4432 chunks,
4433 diagnostic_endpoints,
4434 error_depth: 0,
4435 warning_depth: 0,
4436 information_depth: 0,
4437 hint_depth: 0,
4438 unnecessary_depth: 0,
4439 underline: true,
4440 highlights,
4441 };
4442 this.initialize_diagnostic_endpoints();
4443 this
4444 }
4445
    /// Seeks to the given byte range in the buffer.
4447 pub fn seek(&mut self, range: Range<usize>) {
4448 let old_range = std::mem::replace(&mut self.range, range.clone());
4449 self.chunks.set_range(self.range.clone());
4450 if let Some(highlights) = self.highlights.as_mut() {
4451 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4452 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4453 highlights
4454 .stack
4455 .retain(|(end_offset, _)| *end_offset > range.start);
4456 if let Some(capture) = &highlights.next_capture {
4457 if range.start >= capture.node.start_byte() {
4458 let next_capture_end = capture.node.end_byte();
4459 if range.start < next_capture_end {
4460 highlights.stack.push((
4461 next_capture_end,
4462 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4463 ));
4464 }
4465 highlights.next_capture.take();
4466 }
4467 }
4468 } else if let Some(snapshot) = self.buffer_snapshot {
4469 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4470 *highlights = BufferChunkHighlights {
4471 captures,
4472 next_capture: None,
4473 stack: Default::default(),
4474 highlight_maps,
4475 };
4476 } else {
4477 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4478 // Seeking such BufferChunks is not supported.
4479 debug_assert!(
4480 false,
4481 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4482 );
4483 }
4484
4485 highlights.captures.set_byte_range(self.range.clone());
4486 self.initialize_diagnostic_endpoints();
4487 }
4488 }
4489
4490 fn initialize_diagnostic_endpoints(&mut self) {
4491 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4492 if let Some(buffer) = self.buffer_snapshot {
4493 let mut diagnostic_endpoints = Vec::new();
4494 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4495 diagnostic_endpoints.push(DiagnosticEndpoint {
4496 offset: entry.range.start,
4497 is_start: true,
4498 severity: entry.diagnostic.severity,
4499 is_unnecessary: entry.diagnostic.is_unnecessary,
4500 underline: entry.diagnostic.underline,
4501 });
4502 diagnostic_endpoints.push(DiagnosticEndpoint {
4503 offset: entry.range.end,
4504 is_start: false,
4505 severity: entry.diagnostic.severity,
4506 is_unnecessary: entry.diagnostic.is_unnecessary,
4507 underline: entry.diagnostic.underline,
4508 });
4509 }
4510 diagnostic_endpoints
4511 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4512 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4513 self.hint_depth = 0;
4514 self.error_depth = 0;
4515 self.warning_depth = 0;
4516 self.information_depth = 0;
4517 }
4518 }
4519 }
4520
4521 /// The current byte offset in the buffer.
4522 pub fn offset(&self) -> usize {
4523 self.range.start
4524 }
4525
4526 pub fn range(&self) -> Range<usize> {
4527 self.range.clone()
4528 }
4529
4530 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4531 let depth = match endpoint.severity {
4532 DiagnosticSeverity::ERROR => &mut self.error_depth,
4533 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4534 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4535 DiagnosticSeverity::HINT => &mut self.hint_depth,
4536 _ => return,
4537 };
4538 if endpoint.is_start {
4539 *depth += 1;
4540 } else {
4541 *depth -= 1;
4542 }
4543
4544 if endpoint.is_unnecessary {
4545 if endpoint.is_start {
4546 self.unnecessary_depth += 1;
4547 } else {
4548 self.unnecessary_depth -= 1;
4549 }
4550 }
4551 }
4552
4553 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4554 if self.error_depth > 0 {
4555 Some(DiagnosticSeverity::ERROR)
4556 } else if self.warning_depth > 0 {
4557 Some(DiagnosticSeverity::WARNING)
4558 } else if self.information_depth > 0 {
4559 Some(DiagnosticSeverity::INFORMATION)
4560 } else if self.hint_depth > 0 {
4561 Some(DiagnosticSeverity::HINT)
4562 } else {
4563 None
4564 }
4565 }
4566
4567 fn current_code_is_unnecessary(&self) -> bool {
4568 self.unnecessary_depth > 0
4569 }
4570}
4571
4572impl<'a> Iterator for BufferChunks<'a> {
4573 type Item = Chunk<'a>;
4574
4575 fn next(&mut self) -> Option<Self::Item> {
4576 let mut next_capture_start = usize::MAX;
4577 let mut next_diagnostic_endpoint = usize::MAX;
4578
4579 if let Some(highlights) = self.highlights.as_mut() {
4580 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4581 if *parent_capture_end <= self.range.start {
4582 highlights.stack.pop();
4583 } else {
4584 break;
4585 }
4586 }
4587
4588 if highlights.next_capture.is_none() {
4589 highlights.next_capture = highlights.captures.next();
4590 }
4591
4592 while let Some(capture) = highlights.next_capture.as_ref() {
4593 if self.range.start < capture.node.start_byte() {
4594 next_capture_start = capture.node.start_byte();
4595 break;
4596 } else {
4597 let highlight_id =
4598 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4599 highlights
4600 .stack
4601 .push((capture.node.end_byte(), highlight_id));
4602 highlights.next_capture = highlights.captures.next();
4603 }
4604 }
4605 }
4606
4607 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4608 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4609 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4610 if endpoint.offset <= self.range.start {
4611 self.update_diagnostic_depths(endpoint);
4612 diagnostic_endpoints.next();
4613 self.underline = endpoint.underline;
4614 } else {
4615 next_diagnostic_endpoint = endpoint.offset;
4616 break;
4617 }
4618 }
4619 }
4620 self.diagnostic_endpoints = diagnostic_endpoints;
4621
4622 if let Some(chunk) = self.chunks.peek() {
4623 let chunk_start = self.range.start;
4624 let mut chunk_end = (self.chunks.offset() + chunk.len())
4625 .min(next_capture_start)
4626 .min(next_diagnostic_endpoint);
4627 let mut highlight_id = None;
4628 if let Some(highlights) = self.highlights.as_ref() {
4629 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4630 chunk_end = chunk_end.min(*parent_capture_end);
4631 highlight_id = Some(*parent_highlight_id);
4632 }
4633 }
4634
4635 let slice =
4636 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4637 self.range.start = chunk_end;
4638 if self.range.start == self.chunks.offset() + chunk.len() {
4639 self.chunks.next().unwrap();
4640 }
4641
4642 Some(Chunk {
4643 text: slice,
4644 syntax_highlight_id: highlight_id,
4645 underline: self.underline,
4646 diagnostic_severity: self.current_diagnostic_severity(),
4647 is_unnecessary: self.current_code_is_unnecessary(),
4648 ..Chunk::default()
4649 })
4650 } else {
4651 None
4652 }
4653 }
4654}
4655
4656impl operation_queue::Operation for Operation {
4657 fn lamport_timestamp(&self) -> clock::Lamport {
4658 match self {
4659 Operation::Buffer(_) => {
4660 unreachable!("buffer operations should never be deferred at this layer")
4661 }
4662 Operation::UpdateDiagnostics {
4663 lamport_timestamp, ..
4664 }
4665 | Operation::UpdateSelections {
4666 lamport_timestamp, ..
4667 }
4668 | Operation::UpdateCompletionTriggers {
4669 lamport_timestamp, ..
4670 } => *lamport_timestamp,
4671 }
4672 }
4673}
4674
4675impl Default for Diagnostic {
4676 fn default() -> Self {
4677 Self {
4678 source: Default::default(),
4679 source_kind: DiagnosticSourceKind::Other,
4680 code: None,
4681 code_description: None,
4682 severity: DiagnosticSeverity::ERROR,
4683 message: Default::default(),
4684 markdown: None,
4685 group_id: 0,
4686 is_primary: false,
4687 is_disk_based: false,
4688 is_unnecessary: false,
4689 underline: true,
4690 data: None,
4691 }
4692 }
4693}
4694
4695impl IndentSize {
4696 /// Returns an [`IndentSize`] representing the given spaces.
4697 pub fn spaces(len: u32) -> Self {
4698 Self {
4699 len,
4700 kind: IndentKind::Space,
4701 }
4702 }
4703
4704 /// Returns an [`IndentSize`] representing a tab.
4705 pub fn tab() -> Self {
4706 Self {
4707 len: 1,
4708 kind: IndentKind::Tab,
4709 }
4710 }
4711
4712 /// An iterator over the characters represented by this [`IndentSize`].
4713 pub fn chars(&self) -> impl Iterator<Item = char> {
4714 iter::repeat(self.char()).take(self.len as usize)
4715 }
4716
4717 /// The character representation of this [`IndentSize`].
4718 pub fn char(&self) -> char {
4719 match self.kind {
4720 IndentKind::Space => ' ',
4721 IndentKind::Tab => '\t',
4722 }
4723 }
4724
    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size, in the given direction.
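    ///
    /// A sketch of the behavior:
    /// ```ignore
    /// let shrunk = IndentSize::spaces(8).with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 4);
    /// let grown = IndentSize::spaces(8).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 12);
    /// ```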
4727 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4728 match direction {
4729 Ordering::Less => {
4730 if self.kind == size.kind && self.len >= size.len {
4731 self.len -= size.len;
4732 }
4733 }
4734 Ordering::Equal => {}
4735 Ordering::Greater => {
4736 if self.len == 0 {
4737 self = size;
4738 } else if self.kind == size.kind {
4739 self.len += size.len;
4740 }
4741 }
4742 }
4743 self
4744 }
4745
4746 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4747 match self.kind {
4748 IndentKind::Space => self.len as usize,
4749 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4750 }
4751 }
4752}
4753
4754#[cfg(any(test, feature = "test-support"))]
4755pub struct TestFile {
4756 pub path: Arc<Path>,
4757 pub root_name: String,
4758 pub local_root: Option<PathBuf>,
4759}
4760
4761#[cfg(any(test, feature = "test-support"))]
4762impl File for TestFile {
4763 fn path(&self) -> &Arc<Path> {
4764 &self.path
4765 }
4766
4767 fn full_path(&self, _: &gpui::App) -> PathBuf {
4768 PathBuf::from(&self.root_name).join(self.path.as_ref())
4769 }
4770
4771 fn as_local(&self) -> Option<&dyn LocalFile> {
4772 if self.local_root.is_some() {
4773 Some(self)
4774 } else {
4775 None
4776 }
4777 }
4778
4779 fn disk_state(&self) -> DiskState {
4780 unimplemented!()
4781 }
4782
4783 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4784 self.path().file_name().unwrap_or(self.root_name.as_ref())
4785 }
4786
4787 fn worktree_id(&self, _: &App) -> WorktreeId {
4788 WorktreeId::from_usize(0)
4789 }
4790
4791 fn to_proto(&self, _: &App) -> rpc::proto::File {
4792 unimplemented!()
4793 }
4794
4795 fn is_private(&self) -> bool {
4796 false
4797 }
4798}
4799
4800#[cfg(any(test, feature = "test-support"))]
4801impl LocalFile for TestFile {
4802 fn abs_path(&self, _cx: &App) -> PathBuf {
4803 PathBuf::from(self.local_root.as_ref().unwrap())
4804 .join(&self.root_name)
4805 .join(self.path.as_ref())
4806 }
4807
4808 fn load(&self, _cx: &App) -> Task<Result<String>> {
4809 unimplemented!()
4810 }
4811
4812 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4813 unimplemented!()
4814 }
4815}
4816
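/// Groups an increasing sequence of `u32` values into contiguous ranges, capping each
/// range at `max_len` items.
///
/// A sketch of the behavior:
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```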
4817pub(crate) fn contiguous_ranges(
4818 values: impl Iterator<Item = u32>,
4819 max_len: usize,
4820) -> impl Iterator<Item = Range<u32>> {
4821 let mut values = values;
4822 let mut current_range: Option<Range<u32>> = None;
4823 std::iter::from_fn(move || {
4824 loop {
4825 if let Some(value) = values.next() {
4826 if let Some(range) = &mut current_range {
4827 if value == range.end && range.len() < max_len {
4828 range.end += 1;
4829 continue;
4830 }
4831 }
4832
4833 let prev_range = current_range.clone();
4834 current_range = Some(value..(value + 1));
4835 if prev_range.is_some() {
4836 return prev_range;
4837 }
4838 } else {
4839 return current_range.take();
4840 }
4841 }
4842 })
4843}
4844
4845#[derive(Default, Debug)]
4846pub struct CharClassifier {
4847 scope: Option<LanguageScope>,
4848 for_completion: bool,
4849 ignore_punctuation: bool,
4850}
4851
4852impl CharClassifier {
4853 pub fn new(scope: Option<LanguageScope>) -> Self {
4854 Self {
4855 scope,
4856 for_completion: false,
4857 ignore_punctuation: false,
4858 }
4859 }
4860
4861 pub fn for_completion(self, for_completion: bool) -> Self {
4862 Self {
4863 for_completion,
4864 ..self
4865 }
4866 }
4867
4868 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4869 Self {
4870 ignore_punctuation,
4871 ..self
4872 }
4873 }
4874
4875 pub fn is_whitespace(&self, c: char) -> bool {
4876 self.kind(c) == CharKind::Whitespace
4877 }
4878
4879 pub fn is_word(&self, c: char) -> bool {
4880 self.kind(c) == CharKind::Word
4881 }
4882
4883 pub fn is_punctuation(&self, c: char) -> bool {
4884 self.kind(c) == CharKind::Punctuation
4885 }
4886
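    /// A sketch of the behavior with no language scope (via `CharClassifier::default()`);
    /// this assumes `CharKind` implements `PartialEq` and `Debug`:
    /// ```ignore
    /// let classifier = CharClassifier::default();
    /// assert_eq!(classifier.kind_with('_', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with(' ', false), CharKind::Whitespace);
    /// assert_eq!(classifier.kind_with('.', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('.', true), CharKind::Word);
    /// ```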
4887 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4888 if c.is_alphanumeric() || c == '_' {
4889 return CharKind::Word;
4890 }
4891
4892 if let Some(scope) = &self.scope {
4893 let characters = if self.for_completion {
4894 scope.completion_query_characters()
4895 } else {
4896 scope.word_characters()
4897 };
4898 if let Some(characters) = characters {
4899 if characters.contains(&c) {
4900 return CharKind::Word;
4901 }
4902 }
4903 }
4904
4905 if c.is_whitespace() {
4906 return CharKind::Whitespace;
4907 }
4908
4909 if ignore_punctuation {
4910 CharKind::Word
4911 } else {
4912 CharKind::Punctuation
4913 }
4914 }
4915
4916 pub fn kind(&self, c: char) -> CharKind {
4917 self.kind_with(c, self.ignore_punctuation)
4918 }
4919}
4920
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
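///
/// A sketch of the behavior (assuming `Rope` can be built from a `&str`):
/// ```ignore
/// let rope = Rope::from("ab  \ncd\t\nef");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![2..4, 7..8]);
/// ```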
4926pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4927 let mut ranges = Vec::new();
4928
4929 let mut offset = 0;
4930 let mut prev_chunk_trailing_whitespace_range = 0..0;
4931 for chunk in rope.chunks() {
4932 let mut prev_line_trailing_whitespace_range = 0..0;
4933 for (i, line) in chunk.split('\n').enumerate() {
4934 let line_end_offset = offset + line.len();
4935 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4936 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4937
4938 if i == 0 && trimmed_line_len == 0 {
4939 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4940 }
4941 if !prev_line_trailing_whitespace_range.is_empty() {
4942 ranges.push(prev_line_trailing_whitespace_range);
4943 }
4944
4945 offset = line_end_offset + 1;
4946 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4947 }
4948
4949 offset -= 1;
4950 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4951 }
4952
4953 if !prev_chunk_trailing_whitespace_range.is_empty() {
4954 ranges.push(prev_chunk_trailing_whitespace_range);
4955 }
4956
4957 ranges
4958}