1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
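///
/// # Example
///
/// A sketch (assuming a gpui `App` context `cx`, e.g. in a test; not compiled as a doc-test):
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
/// assert_eq!(buffer.read(cx).text(), "fn main() {}");
/// ```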
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 pub code_description: Option<lsp::Url>,
205 /// Whether this diagnostic is a hint, warning, or error.
206 pub severity: DiagnosticSeverity,
207 /// The human-readable message associated with this diagnostic.
208 pub message: String,
    /// The human-readable message, in Markdown format.
210 pub markdown: Option<String>,
211 /// An id that identifies the group to which this diagnostic belongs.
212 ///
213 /// When a language server produces a diagnostic with
214 /// one or more associated diagnostics, those diagnostics are all
215 /// assigned a single group ID.
216 pub group_id: usize,
217 /// Whether this diagnostic is the primary diagnostic for its group.
218 ///
219 /// In a given group, the primary diagnostic is the top-level diagnostic
220 /// returned by the language server. The non-primary diagnostics are the
221 /// associated diagnostics.
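    ///
    /// For example, a language server might report a "mismatched types" error as the
    /// primary diagnostic, with a related "expected due to this" note attached to the
    /// same group as a non-primary diagnostic.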
222 pub is_primary: bool,
223 /// Whether this diagnostic is considered to originate from an analysis of
224 /// files on disk, as opposed to any unsaved buffer contents. This is a
225 /// property of a given diagnostic source, and is configured for a given
226 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
227 /// for the language server.
228 pub is_disk_based: bool,
229 /// Whether this diagnostic marks unnecessary code.
230 pub is_unnecessary: bool,
    /// A quick way to distinguish diagnostic groups by their source.
232 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the
    /// language server when code actions are requested for this diagnostic.
234 pub data: Option<Value>,
235 /// Whether to underline the corresponding text range in the editor.
236 pub underline: bool,
237}
238
239#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
240pub enum DiagnosticSourceKind {
241 Pulled,
242 Pushed,
243 Other,
244}
245
246/// An operation used to synchronize this buffer with its other replicas.
247#[derive(Clone, Debug, PartialEq)]
248pub enum Operation {
249 /// A text operation.
250 Buffer(text::Operation),
251
252 /// An update to the buffer's diagnostics.
253 UpdateDiagnostics {
254 /// The id of the language server that produced the new diagnostics.
255 server_id: LanguageServerId,
256 /// The diagnostics.
257 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
258 /// The buffer's lamport timestamp.
259 lamport_timestamp: clock::Lamport,
260 },
261
262 /// An update to the most recent selections in this buffer.
263 UpdateSelections {
264 /// The selections.
265 selections: Arc<[Selection<Anchor>]>,
266 /// The buffer's lamport timestamp.
267 lamport_timestamp: clock::Lamport,
268 /// Whether the selections are in 'line mode'.
269 line_mode: bool,
270 /// The [`CursorShape`] associated with these selections.
271 cursor_shape: CursorShape,
272 },
273
274 /// An update to the characters that should trigger autocompletion
275 /// for this buffer.
276 UpdateCompletionTriggers {
277 /// The characters that trigger autocompletion.
278 triggers: Vec<String>,
279 /// The buffer's lamport timestamp.
280 lamport_timestamp: clock::Lamport,
281 /// The language server ID.
282 server_id: LanguageServerId,
283 },
284}
285
286/// An event that occurs in a buffer.
287#[derive(Clone, Debug, PartialEq)]
288pub enum BufferEvent {
289 /// The buffer was changed in a way that must be
290 /// propagated to its other replicas.
291 Operation {
292 operation: Operation,
293 is_local: bool,
294 },
295 /// The buffer was edited.
296 Edited,
297 /// The buffer's `dirty` bit changed.
298 DirtyChanged,
299 /// The buffer was saved.
300 Saved,
301 /// The buffer's file was changed on disk.
302 FileHandleChanged,
303 /// The buffer was reloaded.
304 Reloaded,
    /// The buffer needs to be reloaded.
306 ReloadNeeded,
307 /// The buffer's language was changed.
308 LanguageChanged,
309 /// The buffer's syntax trees were updated.
310 Reparsed,
311 /// The buffer's diagnostics were updated.
312 DiagnosticsUpdated,
313 /// The buffer gained or lost editing capabilities.
314 CapabilityChanged,
315 /// The buffer was explicitly requested to close.
316 Closed,
317 /// The buffer was discarded when closing.
318 Discarded,
319}
320
321/// The file associated with a buffer.
322pub trait File: Send + Sync + Any {
323 /// Returns the [`LocalFile`] associated with this file, if the
324 /// file is local.
325 fn as_local(&self) -> Option<&dyn LocalFile>;
326
327 /// Returns whether this file is local.
328 fn is_local(&self) -> bool {
329 self.as_local().is_some()
330 }
331
332 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
333 /// only available in some states, such as modification time.
334 fn disk_state(&self) -> DiskState;
335
336 /// Returns the path of this file relative to the worktree's root directory.
337 fn path(&self) -> &Arc<Path>;
338
339 /// Returns the path of this file relative to the worktree's parent directory (this means it
340 /// includes the name of the worktree's root folder).
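    ///
    /// For example, a file at `src/main.rs` inside a worktree rooted at a directory named
    /// `project` has a `full_path` of `project/src/main.rs`.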
341 fn full_path(&self, cx: &App) -> PathBuf;
342
343 /// Returns the last component of this handle's absolute path. If this handle refers to the root
344 /// of its worktree, then this method will return the name of the worktree itself.
345 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
346
347 /// Returns the id of the worktree to which this file belongs.
348 ///
349 /// This is needed for looking up project-specific settings.
350 fn worktree_id(&self, cx: &App) -> WorktreeId;
351
352 /// Converts this file into a protobuf message.
353 fn to_proto(&self, cx: &App) -> rpc::proto::File;
354
    /// Returns whether Zed considers this to be a private file.
356 fn is_private(&self) -> bool;
357}
358
/// The file's storage status: whether it is stored on disk (`Present`) and, if so, when it
/// was last modified. A file that is not stored is either `New` or `Deleted`; the UI
/// distinguishes these two states. For example, the buffer tab does not display a deletion
/// indicator for new files.
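///
/// For example, a buffer created in Zed and never saved has `DiskState::New` (no mtime, and
/// `exists()` returns `false`), while a file that has been saved has
/// `DiskState::Present { mtime }`.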
363#[derive(Copy, Clone, Debug, PartialEq)]
364pub enum DiskState {
365 /// File created in Zed that has not been saved.
366 New,
367 /// File present on the filesystem.
368 Present { mtime: MTime },
369 /// Deleted file that was previously present.
370 Deleted,
371}
372
373impl DiskState {
374 /// Returns the file's last known modification time on disk.
375 pub fn mtime(self) -> Option<MTime> {
376 match self {
377 DiskState::New => None,
378 DiskState::Present { mtime } => Some(mtime),
379 DiskState::Deleted => None,
380 }
381 }
382
383 pub fn exists(&self) -> bool {
384 match self {
385 DiskState::New => false,
386 DiskState::Present { .. } => true,
387 DiskState::Deleted => false,
388 }
389 }
390}
391
392/// The file associated with a buffer, in the case where the file is on the local disk.
393pub trait LocalFile: File {
    /// Returns the absolute path of this file.
395 fn abs_path(&self, cx: &App) -> PathBuf;
396
397 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
398 fn load(&self, cx: &App) -> Task<Result<String>>;
399
400 /// Loads the file's contents from disk.
401 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
402}
403
404/// The auto-indent behavior associated with an editing operation.
405/// For some editing operations, each affected line of text has its
406/// indentation recomputed. For other operations, the entire block
407/// of edited text is adjusted uniformly.
408#[derive(Clone, Debug)]
409pub enum AutoindentMode {
410 /// Indent each line of inserted text.
411 EachLine,
412 /// Apply the same indentation adjustment to all of the lines
413 /// in a given insertion.
414 Block {
415 /// The original indentation column of the first line of each
416 /// insertion, if it has been copied.
417 ///
418 /// Knowing this makes it possible to preserve the relative indentation
419 /// of every line in the insertion from when it was copied.
420 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is auto-indented to column `b`, then every other line of the insertion
        /// will have its indentation adjusted by `b - a`.
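        ///
        /// For example, if a block was copied from column 4 (`a = 4`) and its first line is
        /// auto-indented to column 8 (`b = 8`), every other line of the block is shifted by
        /// `8 - 4 = 4` columns, preserving the block's internal relative indentation.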
424 original_indent_columns: Vec<Option<u32>>,
425 },
426}
427
428#[derive(Clone)]
429struct AutoindentRequest {
430 before_edit: BufferSnapshot,
431 entries: Vec<AutoindentRequestEntry>,
432 is_block_mode: bool,
433 ignore_empty_lines: bool,
434}
435
436#[derive(Debug, Clone)]
437struct AutoindentRequestEntry {
438 /// A range of the buffer whose indentation should be adjusted.
439 range: Range<Anchor>,
440 /// Whether or not these lines should be considered brand new, for the
441 /// purpose of auto-indent. When text is not new, its indentation will
442 /// only be adjusted if the suggested indentation level has *changed*
443 /// since the edit was made.
444 first_line_is_new: bool,
445 indent_size: IndentSize,
446 original_indent_column: Option<u32>,
447}
448
449#[derive(Debug)]
450struct IndentSuggestion {
451 basis_row: u32,
452 delta: Ordering,
453 within_error: bool,
454}
455
456struct BufferChunkHighlights<'a> {
457 captures: SyntaxMapCaptures<'a>,
458 next_capture: Option<SyntaxMapCapture<'a>>,
459 stack: Vec<(usize, HighlightId)>,
460 highlight_maps: Vec<HighlightMap>,
461}
462
463/// An iterator that yields chunks of a buffer's text, along with their
464/// syntax highlights and diagnostic status.
465pub struct BufferChunks<'a> {
466 buffer_snapshot: Option<&'a BufferSnapshot>,
467 range: Range<usize>,
468 chunks: text::Chunks<'a>,
469 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
470 error_depth: usize,
471 warning_depth: usize,
472 information_depth: usize,
473 hint_depth: usize,
474 unnecessary_depth: usize,
475 underline: bool,
476 highlights: Option<BufferChunkHighlights<'a>>,
477}
478
479/// A chunk of a buffer's text, along with its syntax highlight and
480/// diagnostic status.
481#[derive(Clone, Debug, Default)]
482pub struct Chunk<'a> {
483 /// The text of the chunk.
484 pub text: &'a str,
485 /// The syntax highlighting style of the chunk.
486 pub syntax_highlight_id: Option<HighlightId>,
487 /// The highlight style that has been applied to this chunk in
488 /// the editor.
489 pub highlight_style: Option<HighlightStyle>,
490 /// The severity of diagnostic associated with this chunk, if any.
491 pub diagnostic_severity: Option<DiagnosticSeverity>,
492 /// Whether this chunk of text is marked as unnecessary.
493 pub is_unnecessary: bool,
494 /// Whether this chunk of text was originally a tab character.
495 pub is_tab: bool,
    /// Whether this chunk of text is an inlay (text inserted by the editor, not present in the buffer).
497 pub is_inlay: bool,
498 /// Whether to underline the corresponding text range in the editor.
499 pub underline: bool,
500}
501
502/// A set of edits to a given version of a buffer, computed asynchronously.
503#[derive(Debug)]
504pub struct Diff {
505 pub base_version: clock::Global,
506 pub line_ending: LineEnding,
507 pub edits: Vec<(Range<usize>, Arc<str>)>,
508}
509
510#[derive(Debug, Clone, Copy)]
511pub(crate) struct DiagnosticEndpoint {
512 offset: usize,
513 is_start: bool,
514 underline: bool,
515 severity: DiagnosticSeverity,
516 is_unnecessary: bool,
517}
518
519/// A class of characters, used for characterizing a run of text.
520#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
521pub enum CharKind {
522 /// Whitespace.
523 Whitespace,
524 /// Punctuation.
525 Punctuation,
526 /// Word.
527 Word,
528}
529
/// A runnable is a set of data about a buffer region that can be resolved into a task.
531pub struct Runnable {
532 pub tags: SmallVec<[RunnableTag; 1]>,
533 pub language: Arc<Language>,
534 pub buffer: BufferId,
535}
536
537#[derive(Default, Clone, Debug)]
538pub struct HighlightedText {
539 pub text: SharedString,
540 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
541}
542
543#[derive(Default, Debug)]
544struct HighlightedTextBuilder {
545 pub text: String,
546 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
547}
548
549impl HighlightedText {
550 pub fn from_buffer_range<T: ToOffset>(
551 range: Range<T>,
552 snapshot: &text::BufferSnapshot,
553 syntax_snapshot: &SyntaxSnapshot,
554 override_style: Option<HighlightStyle>,
555 syntax_theme: &SyntaxTheme,
556 ) -> Self {
557 let mut highlighted_text = HighlightedTextBuilder::default();
558 highlighted_text.add_text_from_buffer_range(
559 range,
560 snapshot,
561 syntax_snapshot,
562 override_style,
563 syntax_theme,
564 );
565 highlighted_text.build()
566 }
567
568 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
569 gpui::StyledText::new(self.text.clone())
570 .with_default_highlights(default_style, self.highlights.iter().cloned())
571 }
572
    /// Returns the first line, with leading whitespace trimmed (but never past the start of
    /// the first highlight), along with a boolean indicating whether more lines follow.
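    ///
    /// For example, with the text `"    foo();\nbar();"` and no highlights, this returns
    /// a preview containing `"foo();"` and `true` (a second line follows).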
575 pub fn first_line_preview(self) -> (Self, bool) {
576 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
577 let first_line = &self.text[..newline_ix];
578
579 // Trim leading whitespace, unless an edit starts prior to it.
580 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
581 if let Some((first_highlight_range, _)) = self.highlights.first() {
582 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
583 }
584
585 let preview_text = &first_line[preview_start_ix..];
586 let preview_highlights = self
587 .highlights
588 .into_iter()
589 .take_while(|(range, _)| range.start < newline_ix)
590 .filter_map(|(mut range, highlight)| {
591 range.start = range.start.saturating_sub(preview_start_ix);
592 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
593 if range.is_empty() {
594 None
595 } else {
596 Some((range, highlight))
597 }
598 });
599
600 let preview = Self {
601 text: SharedString::new(preview_text),
602 highlights: preview_highlights.collect(),
603 };
604
605 (preview, self.text.len() > newline_ix)
606 }
607}
608
609impl HighlightedTextBuilder {
610 pub fn build(self) -> HighlightedText {
611 HighlightedText {
612 text: self.text.into(),
613 highlights: self.highlights,
614 }
615 }
616
617 pub fn add_text_from_buffer_range<T: ToOffset>(
618 &mut self,
619 range: Range<T>,
620 snapshot: &text::BufferSnapshot,
621 syntax_snapshot: &SyntaxSnapshot,
622 override_style: Option<HighlightStyle>,
623 syntax_theme: &SyntaxTheme,
624 ) {
625 let range = range.to_offset(snapshot);
626 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
627 let start = self.text.len();
628 self.text.push_str(chunk.text);
629 let end = self.text.len();
630
631 if let Some(mut highlight_style) = chunk
632 .syntax_highlight_id
633 .and_then(|id| id.style(syntax_theme))
634 {
635 if let Some(override_style) = override_style {
636 highlight_style.highlight(override_style);
637 }
638 self.highlights.push((start..end, highlight_style));
639 } else if let Some(override_style) = override_style {
640 self.highlights.push((start..end, override_style));
641 }
642 }
643 }
644
645 fn highlighted_chunks<'a>(
646 range: Range<usize>,
647 snapshot: &'a text::BufferSnapshot,
648 syntax_snapshot: &'a SyntaxSnapshot,
649 ) -> BufferChunks<'a> {
650 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
651 grammar.highlights_query.as_ref()
652 });
653
654 let highlight_maps = captures
655 .grammars()
656 .iter()
657 .map(|grammar| grammar.highlight_map())
658 .collect();
659
660 BufferChunks::new(
661 snapshot.as_rope(),
662 range,
663 Some((captures, highlight_maps)),
664 false,
665 None,
666 )
667 }
668}
669
670#[derive(Clone)]
671pub struct EditPreview {
672 old_snapshot: text::BufferSnapshot,
673 applied_edits_snapshot: text::BufferSnapshot,
674 syntax_snapshot: SyntaxSnapshot,
675}
676
677impl EditPreview {
678 pub fn highlight_edits(
679 &self,
680 current_snapshot: &BufferSnapshot,
681 edits: &[(Range<Anchor>, String)],
682 include_deletions: bool,
683 cx: &App,
684 ) -> HighlightedText {
685 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
686 return HighlightedText::default();
687 };
688
689 let mut highlighted_text = HighlightedTextBuilder::default();
690
691 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
692
693 let insertion_highlight_style = HighlightStyle {
694 background_color: Some(cx.theme().status().created_background),
695 ..Default::default()
696 };
697 let deletion_highlight_style = HighlightStyle {
698 background_color: Some(cx.theme().status().deleted_background),
699 ..Default::default()
700 };
701 let syntax_theme = cx.theme().syntax();
702
703 for (range, edit_text) in edits {
704 let edit_new_end_in_preview_snapshot = range
705 .end
706 .bias_right(&self.old_snapshot)
707 .to_offset(&self.applied_edits_snapshot);
708 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
709
710 let unchanged_range_in_preview_snapshot =
711 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
712 if !unchanged_range_in_preview_snapshot.is_empty() {
713 highlighted_text.add_text_from_buffer_range(
714 unchanged_range_in_preview_snapshot,
715 &self.applied_edits_snapshot,
716 &self.syntax_snapshot,
717 None,
718 &syntax_theme,
719 );
720 }
721
722 let range_in_current_snapshot = range.to_offset(current_snapshot);
723 if include_deletions && !range_in_current_snapshot.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
728 Some(deletion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 if !edit_text.is_empty() {
734 highlighted_text.add_text_from_buffer_range(
735 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
736 &self.applied_edits_snapshot,
737 &self.syntax_snapshot,
738 Some(insertion_highlight_style),
739 &syntax_theme,
740 );
741 }
742
743 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
744 }
745
746 highlighted_text.add_text_from_buffer_range(
747 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
748 &self.applied_edits_snapshot,
749 &self.syntax_snapshot,
750 None,
751 &syntax_theme,
752 );
753
754 highlighted_text.build()
755 }
756
757 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
758 let (first, _) = edits.first()?;
759 let (last, _) = edits.last()?;
760
761 let start = first
762 .start
763 .bias_left(&self.old_snapshot)
764 .to_point(&self.applied_edits_snapshot);
765 let end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
771 let range = Point::new(start.row, 0)
772 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
773
774 Some(range.to_offset(&self.applied_edits_snapshot))
775 }
776}
777
778#[derive(Clone, Debug, PartialEq, Eq)]
779pub struct BracketMatch {
780 pub open_range: Range<usize>,
781 pub close_range: Range<usize>,
782 pub newline_only: bool,
783}
784
785impl Buffer {
786 /// Create a new buffer with the given base text.
787 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
788 Self::build(
789 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
790 None,
791 Capability::ReadWrite,
792 )
793 }
794
795 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
796 pub fn local_normalized(
797 base_text_normalized: Rope,
798 line_ending: LineEnding,
799 cx: &Context<Self>,
800 ) -> Self {
801 Self::build(
802 TextBuffer::new_normalized(
803 0,
804 cx.entity_id().as_non_zero_u64().into(),
805 line_ending,
806 base_text_normalized,
807 ),
808 None,
809 Capability::ReadWrite,
810 )
811 }
812
813 /// Create a new buffer that is a replica of a remote buffer.
814 pub fn remote(
815 remote_id: BufferId,
816 replica_id: ReplicaId,
817 capability: Capability,
818 base_text: impl Into<String>,
819 ) -> Self {
820 Self::build(
821 TextBuffer::new(replica_id, remote_id, base_text.into()),
822 None,
823 capability,
824 )
825 }
826
827 /// Create a new buffer that is a replica of a remote buffer, populating its
828 /// state from the given protobuf message.
829 pub fn from_proto(
830 replica_id: ReplicaId,
831 capability: Capability,
832 message: proto::BufferState,
833 file: Option<Arc<dyn File>>,
834 ) -> Result<Self> {
835 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
836 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
837 let mut this = Self::build(buffer, file, capability);
838 this.text.set_line_ending(proto::deserialize_line_ending(
839 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
840 ));
841 this.saved_version = proto::deserialize_version(&message.saved_version);
842 this.saved_mtime = message.saved_mtime.map(|time| time.into());
843 Ok(this)
844 }
845
846 /// Serialize the buffer's state to a protobuf message.
847 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
848 proto::BufferState {
849 id: self.remote_id().into(),
850 file: self.file.as_ref().map(|f| f.to_proto(cx)),
851 base_text: self.base_text().to_string(),
852 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
853 saved_version: proto::serialize_version(&self.saved_version),
854 saved_mtime: self.saved_mtime.map(|time| time.into()),
855 }
856 }
857
858 /// Serialize as protobufs all of the changes to the buffer since the given version.
859 pub fn serialize_ops(
860 &self,
861 since: Option<clock::Global>,
862 cx: &App,
863 ) -> Task<Vec<proto::Operation>> {
864 let mut operations = Vec::new();
865 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
866
867 operations.extend(self.remote_selections.iter().map(|(_, set)| {
868 proto::serialize_operation(&Operation::UpdateSelections {
869 selections: set.selections.clone(),
870 lamport_timestamp: set.lamport_timestamp,
871 line_mode: set.line_mode,
872 cursor_shape: set.cursor_shape,
873 })
874 }));
875
876 for (server_id, diagnostics) in &self.diagnostics {
877 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
878 lamport_timestamp: self.diagnostics_timestamp,
879 server_id: *server_id,
880 diagnostics: diagnostics.iter().cloned().collect(),
881 }));
882 }
883
884 for (server_id, completions) in &self.completion_triggers_per_language_server {
885 operations.push(proto::serialize_operation(
886 &Operation::UpdateCompletionTriggers {
887 triggers: completions.iter().cloned().collect(),
888 lamport_timestamp: self.completion_triggers_timestamp,
889 server_id: *server_id,
890 },
891 ));
892 }
893
894 let text_operations = self.text.operations().clone();
895 cx.background_spawn(async move {
896 let since = since.unwrap_or_default();
897 operations.extend(
898 text_operations
899 .iter()
900 .filter(|(_, op)| !since.observed(op.timestamp()))
901 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
902 );
903 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
904 operations
905 })
906 }
907
908 /// Assign a language to the buffer, returning the buffer.
909 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
910 self.set_language(Some(language), cx);
911 self
912 }
913
914 /// Returns the [`Capability`] of this buffer.
915 pub fn capability(&self) -> Capability {
916 self.capability
917 }
918
919 /// Whether this buffer can only be read.
920 pub fn read_only(&self) -> bool {
921 self.capability == Capability::ReadOnly
922 }
923
924 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
925 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
926 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
927 let snapshot = buffer.snapshot();
928 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
929 Self {
930 saved_mtime,
931 saved_version: buffer.version(),
932 preview_version: buffer.version(),
933 reload_task: None,
934 transaction_depth: 0,
935 was_dirty_before_starting_transaction: None,
936 has_unsaved_edits: Cell::new((buffer.version(), false)),
937 text: buffer,
938 branch_state: None,
939 file,
940 capability,
941 syntax_map,
942 reparse: None,
943 non_text_state_update_count: 0,
944 sync_parse_timeout: Duration::from_millis(1),
945 parse_status: watch::channel(ParseStatus::Idle),
946 autoindent_requests: Default::default(),
947 pending_autoindent: Default::default(),
948 language: None,
949 remote_selections: Default::default(),
950 diagnostics: Default::default(),
951 diagnostics_timestamp: Default::default(),
952 completion_triggers: Default::default(),
953 completion_triggers_per_language_server: Default::default(),
954 completion_triggers_timestamp: Default::default(),
955 deferred_ops: OperationQueue::new(),
956 has_conflict: false,
957 change_bits: Default::default(),
958 _subscriptions: Vec::new(),
959 }
960 }
961
962 pub fn build_snapshot(
963 text: Rope,
964 language: Option<Arc<Language>>,
965 language_registry: Option<Arc<LanguageRegistry>>,
966 cx: &mut App,
967 ) -> impl Future<Output = BufferSnapshot> + use<> {
968 let entity_id = cx.reserve_entity::<Self>().entity_id();
969 let buffer_id = entity_id.as_non_zero_u64().into();
970 async move {
971 let text =
972 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
973 let mut syntax = SyntaxMap::new(&text).snapshot();
974 if let Some(language) = language.clone() {
975 let text = text.clone();
976 let language = language.clone();
977 let language_registry = language_registry.clone();
978 syntax.reparse(&text, language_registry, language);
979 }
980 BufferSnapshot {
981 text,
982 syntax,
983 file: None,
984 diagnostics: Default::default(),
985 remote_selections: Default::default(),
986 language,
987 non_text_state_update_count: 0,
988 }
989 }
990 }
991
992 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
993 let entity_id = cx.reserve_entity::<Self>().entity_id();
994 let buffer_id = entity_id.as_non_zero_u64().into();
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
997 let syntax = SyntaxMap::new(&text).snapshot();
998 BufferSnapshot {
999 text,
1000 syntax,
1001 file: None,
1002 diagnostics: Default::default(),
1003 remote_selections: Default::default(),
1004 language: None,
1005 non_text_state_update_count: 0,
1006 }
1007 }
1008
1009 #[cfg(any(test, feature = "test-support"))]
1010 pub fn build_snapshot_sync(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> BufferSnapshot {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1019 let mut syntax = SyntaxMap::new(&text).snapshot();
1020 if let Some(language) = language.clone() {
1021 let text = text.clone();
1022 let language = language.clone();
1023 let language_registry = language_registry.clone();
1024 syntax.reparse(&text, language_registry, language);
1025 }
1026 BufferSnapshot {
1027 text,
1028 syntax,
1029 file: None,
1030 diagnostics: Default::default(),
1031 remote_selections: Default::default(),
1032 language,
1033 non_text_state_update_count: 0,
1034 }
1035 }
1036
1037 /// Retrieve a snapshot of the buffer's current state. This is computationally
1038 /// cheap, and allows reading from the buffer on a background thread.
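    ///
    /// # Example
    ///
    /// A sketch (assuming `buffer: Entity<Buffer>` and a gpui `App` context `cx`; not
    /// compiled as a doc-test):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read from the snapshot without blocking the main thread.
    ///     let _text = snapshot.text();
    /// })
    /// .detach();
    /// ```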
1039 pub fn snapshot(&self) -> BufferSnapshot {
1040 let text = self.text.snapshot();
1041 let mut syntax_map = self.syntax_map.lock();
1042 syntax_map.interpolate(&text);
1043 let syntax = syntax_map.snapshot();
1044
1045 BufferSnapshot {
1046 text,
1047 syntax,
1048 file: self.file.clone(),
1049 remote_selections: self.remote_selections.clone(),
1050 diagnostics: self.diagnostics.clone(),
1051 language: self.language.clone(),
1052 non_text_state_update_count: self.non_text_state_update_count,
1053 }
1054 }
1055
1056 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1057 let this = cx.entity();
1058 cx.new(|cx| {
1059 let mut branch = Self {
1060 branch_state: Some(BufferBranchState {
1061 base_buffer: this.clone(),
1062 merged_operations: Default::default(),
1063 }),
1064 language: self.language.clone(),
1065 has_conflict: self.has_conflict,
1066 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1067 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1068 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1069 };
1070 if let Some(language_registry) = self.language_registry() {
1071 branch.set_language_registry(language_registry);
1072 }
1073
1074 // Reparse the branch buffer so that we get syntax highlighting immediately.
1075 branch.reparse(cx);
1076
1077 branch
1078 })
1079 }
1080
1081 pub fn preview_edits(
1082 &self,
1083 edits: Arc<[(Range<Anchor>, String)]>,
1084 cx: &App,
1085 ) -> Task<EditPreview> {
1086 let registry = self.language_registry();
1087 let language = self.language().cloned();
1088 let old_snapshot = self.text.snapshot();
1089 let mut branch_buffer = self.text.branch();
1090 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1091 cx.background_spawn(async move {
1092 if !edits.is_empty() {
1093 if let Some(language) = language.clone() {
1094 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1095 }
1096
1097 branch_buffer.edit(edits.iter().cloned());
1098 let snapshot = branch_buffer.snapshot();
1099 syntax_snapshot.interpolate(&snapshot);
1100
1101 if let Some(language) = language {
1102 syntax_snapshot.reparse(&snapshot, registry, language);
1103 }
1104 }
1105 EditPreview {
1106 old_snapshot,
1107 applied_edits_snapshot: branch_buffer.snapshot(),
1108 syntax_snapshot,
1109 }
1110 })
1111 }
1112
1113 /// Applies all of the changes in this buffer that intersect any of the
1114 /// given `ranges` to its base buffer.
1115 ///
1116 /// If `ranges` is empty, then all changes will be applied. This buffer must
1117 /// be a branch buffer to call this method.
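    ///
    /// # Example
    ///
    /// A sketch (assuming `buffer: Entity<Buffer>` and a gpui `App` context `cx`; not
    /// compiled as a doc-test):
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// prologue\n")], None, cx);
    ///     // An empty `ranges` argument applies every change in the branch.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```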
1118 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1119 let Some(base_buffer) = self.base_buffer() else {
1120 debug_panic!("not a branch buffer");
1121 return;
1122 };
1123
1124 let mut ranges = if ranges.is_empty() {
1125 &[0..usize::MAX]
1126 } else {
1127 ranges.as_slice()
1128 }
1129 .into_iter()
1130 .peekable();
1131
1132 let mut edits = Vec::new();
1133 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1134 let mut is_included = false;
1135 while let Some(range) = ranges.peek() {
1136 if range.end < edit.new.start {
1137 ranges.next().unwrap();
1138 } else {
1139 if range.start <= edit.new.end {
1140 is_included = true;
1141 }
1142 break;
1143 }
1144 }
1145
1146 if is_included {
1147 edits.push((
1148 edit.old.clone(),
1149 self.text_for_range(edit.new.clone()).collect::<String>(),
1150 ));
1151 }
1152 }
1153
1154 let operation = base_buffer.update(cx, |base_buffer, cx| {
1155 // cx.emit(BufferEvent::DiffBaseChanged);
1156 base_buffer.edit(edits, None, cx)
1157 });
1158
1159 if let Some(operation) = operation {
1160 if let Some(BufferBranchState {
1161 merged_operations, ..
1162 }) = &mut self.branch_state
1163 {
1164 merged_operations.push(operation);
1165 }
1166 }
1167 }
1168
1169 fn on_base_buffer_event(
1170 &mut self,
1171 _: Entity<Buffer>,
1172 event: &BufferEvent,
1173 cx: &mut Context<Self>,
1174 ) {
1175 let BufferEvent::Operation { operation, .. } = event else {
1176 return;
1177 };
1178 let Some(BufferBranchState {
1179 merged_operations, ..
1180 }) = &mut self.branch_state
1181 else {
1182 return;
1183 };
1184
1185 let mut operation_to_undo = None;
1186 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1187 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1188 merged_operations.remove(ix);
1189 operation_to_undo = Some(operation.timestamp);
1190 }
1191 }
1192
1193 self.apply_ops([operation.clone()], cx);
1194
1195 if let Some(timestamp) = operation_to_undo {
1196 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1197 self.undo_operations(counts, cx);
1198 }
1199 }
1200
1201 #[cfg(test)]
1202 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1203 &self.text
1204 }
1205
1206 /// Retrieve a snapshot of the buffer's raw text, without any
1207 /// language-related state like the syntax tree or diagnostics.
1208 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1209 self.text.snapshot()
1210 }
1211
1212 /// The file associated with the buffer, if any.
1213 pub fn file(&self) -> Option<&Arc<dyn File>> {
1214 self.file.as_ref()
1215 }
1216
1217 /// The version of the buffer that was last saved or reloaded from disk.
1218 pub fn saved_version(&self) -> &clock::Global {
1219 &self.saved_version
1220 }
1221
1222 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1223 pub fn saved_mtime(&self) -> Option<MTime> {
1224 self.saved_mtime
1225 }
1226
1227 /// Assign a language to the buffer.
1228 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1229 self.non_text_state_update_count += 1;
1230 self.syntax_map.lock().clear(&self.text);
1231 self.language = language;
1232 self.was_changed();
1233 self.reparse(cx);
1234 cx.emit(BufferEvent::LanguageChanged);
1235 }
1236
1237 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1238 /// other languages if parts of the buffer are written in different languages.
1239 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1240 self.syntax_map
1241 .lock()
1242 .set_language_registry(language_registry);
1243 }
1244
1245 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1246 self.syntax_map.lock().language_registry()
1247 }
1248
1249 /// Assign the buffer a new [`Capability`].
1250 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1251 self.capability = capability;
1252 cx.emit(BufferEvent::CapabilityChanged)
1253 }
1254
1255 /// This method is called to signal that the buffer has been saved.
1256 pub fn did_save(
1257 &mut self,
1258 version: clock::Global,
1259 mtime: Option<MTime>,
1260 cx: &mut Context<Self>,
1261 ) {
1262 self.saved_version = version;
1263 self.has_unsaved_edits
1264 .set((self.saved_version().clone(), false));
1265 self.has_conflict = false;
1266 self.saved_mtime = mtime;
1267 self.was_changed();
1268 cx.emit(BufferEvent::Saved);
1269 cx.notify();
1270 }
1271
1272 /// This method is called to signal that the buffer has been discarded.
1273 pub fn discarded(&self, cx: &mut Context<Self>) {
1274 cx.emit(BufferEvent::Discarded);
1275 cx.notify();
1276 }
1277
1278 /// Reloads the contents of the buffer from disk.
1279 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1280 let (tx, rx) = futures::channel::oneshot::channel();
1281 let prev_version = self.text.version();
1282 self.reload_task = Some(cx.spawn(async move |this, cx| {
1283 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1284 let file = this.file.as_ref()?.as_local()?;
1285
1286 Some((file.disk_state().mtime(), file.load(cx)))
1287 })?
1288 else {
1289 return Ok(());
1290 };
1291
1292 let new_text = new_text.await?;
1293 let diff = this
1294 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1295 .await;
1296 this.update(cx, |this, cx| {
1297 if this.version() == diff.base_version {
1298 this.finalize_last_transaction();
1299 this.apply_diff(diff, cx);
1300 tx.send(this.finalize_last_transaction().cloned()).ok();
1301 this.has_conflict = false;
1302 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1303 } else {
1304 if !diff.edits.is_empty()
1305 || this
1306 .edits_since::<usize>(&diff.base_version)
1307 .next()
1308 .is_some()
1309 {
1310 this.has_conflict = true;
1311 }
1312
1313 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1314 }
1315
1316 this.reload_task.take();
1317 })
1318 }));
1319 rx
1320 }
1321
1322 /// This method is called to signal that the buffer has been reloaded.
1323 pub fn did_reload(
1324 &mut self,
1325 version: clock::Global,
1326 line_ending: LineEnding,
1327 mtime: Option<MTime>,
1328 cx: &mut Context<Self>,
1329 ) {
1330 self.saved_version = version;
1331 self.has_unsaved_edits
1332 .set((self.saved_version.clone(), false));
1333 self.text.set_line_ending(line_ending);
1334 self.saved_mtime = mtime;
1335 cx.emit(BufferEvent::Reloaded);
1336 cx.notify();
1337 }
1338
1339 /// Updates the [`File`] backing this buffer. This should be called when
1340 /// the file has changed or has been deleted.
1341 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1342 let was_dirty = self.is_dirty();
1343 let mut file_changed = false;
1344
1345 if let Some(old_file) = self.file.as_ref() {
1346 if new_file.path() != old_file.path() {
1347 file_changed = true;
1348 }
1349
1350 let old_state = old_file.disk_state();
1351 let new_state = new_file.disk_state();
1352 if old_state != new_state {
1353 file_changed = true;
1354 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1355 cx.emit(BufferEvent::ReloadNeeded)
1356 }
1357 }
1358 } else {
1359 file_changed = true;
1360 };
1361
1362 self.file = Some(new_file);
1363 if file_changed {
1364 self.was_changed();
1365 self.non_text_state_update_count += 1;
1366 if was_dirty != self.is_dirty() {
1367 cx.emit(BufferEvent::DirtyChanged);
1368 }
1369 cx.emit(BufferEvent::FileHandleChanged);
1370 cx.notify();
1371 }
1372 }
1373
1374 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1375 Some(self.branch_state.as_ref()?.base_buffer.clone())
1376 }
1377
1378 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1379 pub fn language(&self) -> Option<&Arc<Language>> {
1380 self.language.as_ref()
1381 }
1382
1383 /// Returns the [`Language`] at the given location.
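    ///
    /// For example, in a Markdown buffer that contains a fenced Rust code block, calling this
    /// with a position inside the fence returns the Rust language (if it is loaded), while
    /// [`Buffer::language`] still returns Markdown.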
1384 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1385 let offset = position.to_offset(self);
1386 let mut is_first = true;
1387 let start_anchor = self.anchor_before(offset);
1388 let end_anchor = self.anchor_after(offset);
1389 self.syntax_map
1390 .lock()
1391 .layers_for_range(offset..offset, &self.text, false)
1392 .filter(|layer| {
1393 if is_first {
1394 is_first = false;
1395 return true;
1396 }
1397 let any_sub_ranges_contain_range = layer
1398 .included_sub_ranges
1399 .map(|sub_ranges| {
1400 sub_ranges.iter().any(|sub_range| {
1401 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1402 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1403 !is_before_start && !is_after_end
1404 })
1405 })
1406 .unwrap_or(true);
1407 let result = any_sub_ranges_contain_range;
1408 return result;
1409 })
1410 .last()
1411 .map(|info| info.language.clone())
1412 .or_else(|| self.language.clone())
1413 }
1414
1415 /// Returns each [`Language`] for the active syntax layers at the given location.
1416 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1417 let offset = position.to_offset(self);
1418 let mut languages: Vec<Arc<Language>> = self
1419 .syntax_map
1420 .lock()
1421 .layers_for_range(offset..offset, &self.text, false)
1422 .map(|info| info.language.clone())
1423 .collect();
1424
1425 if languages.is_empty() {
1426 if let Some(buffer_language) = self.language() {
1427 languages.push(buffer_language.clone());
1428 }
1429 }
1430
1431 languages
1432 }
1433
1434 /// An integer version number that accounts for all updates besides
1435 /// the buffer's text itself (which is versioned via a version vector).
1436 pub fn non_text_state_update_count(&self) -> usize {
1437 self.non_text_state_update_count
1438 }
1439
1440 /// Whether the buffer is being parsed in the background.
1441 #[cfg(any(test, feature = "test-support"))]
1442 pub fn is_parsing(&self) -> bool {
1443 self.reparse.is_some()
1444 }
1445
1446 /// Indicates whether the buffer contains any regions that may be
1447 /// written in a language that hasn't been loaded yet.
1448 pub fn contains_unknown_injections(&self) -> bool {
1449 self.syntax_map.lock().contains_unknown_injections()
1450 }
1451
1452 #[cfg(test)]
1453 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1454 self.sync_parse_timeout = timeout;
1455 }
1456
1457 /// Called after an edit to synchronize the buffer's main parse tree with
1458 /// the buffer's new underlying state.
1459 ///
1460 /// Locks the syntax map and interpolates the edits since the last reparse
1461 /// into the foreground syntax tree.
1462 ///
1463 /// Then takes a stable snapshot of the syntax map before unlocking it.
1464 /// The snapshot with the interpolated edits is sent to a background thread,
1465 /// where we ask Tree-sitter to perform an incremental parse.
1466 ///
1467 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1468 /// waiting on the parse to complete. As soon as it completes, we proceed
1469 /// synchronously, unless a 1ms timeout elapses.
1470 ///
1471 /// If we time out waiting on the parse, we spawn a second task waiting
1472 /// until the parse does complete and return with the interpolated tree still
1473 /// in the foreground. When the background parse completes, call back into
1474 /// the main thread and assign the foreground parse state.
1475 ///
1476 /// If the buffer or grammar changed since the start of the background parse,
1477 /// initiate an additional reparse recursively. To avoid concurrent parses
1478 /// for the same buffer, we only initiate a new parse if we are not already
1479 /// parsing in the background.
1480 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1481 if self.reparse.is_some() {
1482 return;
1483 }
1484 let language = if let Some(language) = self.language.clone() {
1485 language
1486 } else {
1487 return;
1488 };
1489
1490 let text = self.text_snapshot();
1491 let parsed_version = self.version();
1492
1493 let mut syntax_map = self.syntax_map.lock();
1494 syntax_map.interpolate(&text);
1495 let language_registry = syntax_map.language_registry();
1496 let mut syntax_snapshot = syntax_map.snapshot();
1497 drop(syntax_map);
1498
1499 let parse_task = cx.background_spawn({
1500 let language = language.clone();
1501 let language_registry = language_registry.clone();
1502 async move {
1503 syntax_snapshot.reparse(&text, language_registry, language);
1504 syntax_snapshot
1505 }
1506 });
1507
1508 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1509 match cx
1510 .background_executor()
1511 .block_with_timeout(self.sync_parse_timeout, parse_task)
1512 {
1513 Ok(new_syntax_snapshot) => {
1514 self.did_finish_parsing(new_syntax_snapshot, cx);
1515 self.reparse = None;
1516 }
1517 Err(parse_task) => {
1518 self.reparse = Some(cx.spawn(async move |this, cx| {
1519 let new_syntax_map = parse_task.await;
1520 this.update(cx, move |this, cx| {
1521 let grammar_changed =
1522 this.language.as_ref().map_or(true, |current_language| {
1523 !Arc::ptr_eq(&language, current_language)
1524 });
1525 let language_registry_changed = new_syntax_map
1526 .contains_unknown_injections()
1527 && language_registry.map_or(false, |registry| {
1528 registry.version() != new_syntax_map.language_registry_version()
1529 });
1530 let parse_again = language_registry_changed
1531 || grammar_changed
1532 || this.version.changed_since(&parsed_version);
1533 this.did_finish_parsing(new_syntax_map, cx);
1534 this.reparse = None;
1535 if parse_again {
1536 this.reparse(cx);
1537 }
1538 })
1539 .ok();
1540 }));
1541 }
1542 }
1543 }
1544
1545 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1546 self.was_changed();
1547 self.non_text_state_update_count += 1;
1548 self.syntax_map.lock().did_parse(syntax_snapshot);
1549 self.request_autoindent(cx);
1550 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1551 cx.emit(BufferEvent::Reparsed);
1552 cx.notify();
1553 }
1554
1555 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1556 self.parse_status.1.clone()
1557 }
1558
1559 /// Assign to the buffer a set of diagnostics created by a given language server.
1560 pub fn update_diagnostics(
1561 &mut self,
1562 server_id: LanguageServerId,
1563 diagnostics: DiagnosticSet,
1564 cx: &mut Context<Self>,
1565 ) {
1566 let lamport_timestamp = self.text.lamport_clock.tick();
1567 let op = Operation::UpdateDiagnostics {
1568 server_id,
1569 diagnostics: diagnostics.iter().cloned().collect(),
1570 lamport_timestamp,
1571 };
1572 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1573 self.send_operation(op, true, cx);
1574 }
1575
1576 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1577 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1578 return None;
1579 };
1580 Some(&self.diagnostics[idx].1)
1581 }
1582
1583 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1584 if let Some(indent_sizes) = self.compute_autoindents() {
1585 let indent_sizes = cx.background_spawn(indent_sizes);
1586 match cx
1587 .background_executor()
1588 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1589 {
1590 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1591 Err(indent_sizes) => {
1592 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1593 let indent_sizes = indent_sizes.await;
1594 this.update(cx, |this, cx| {
1595 this.apply_autoindents(indent_sizes, cx);
1596 })
1597 .ok();
1598 }));
1599 }
1600 }
1601 } else {
1602 self.autoindent_requests.clear();
1603 }
1604 }
1605
1606 fn compute_autoindents(
1607 &self,
1608 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1609 let max_rows_between_yields = 100;
1610 let snapshot = self.snapshot();
1611 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1612 return None;
1613 }
1614
1615 let autoindent_requests = self.autoindent_requests.clone();
1616 Some(async move {
1617 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1618 for request in autoindent_requests {
1619 // Resolve each edited range to its row in the current buffer and in the
1620 // buffer before this batch of edits.
1621 let mut row_ranges = Vec::new();
1622 let mut old_to_new_rows = BTreeMap::new();
1623 let mut language_indent_sizes_by_new_row = Vec::new();
1624 for entry in &request.entries {
1625 let position = entry.range.start;
1626 let new_row = position.to_point(&snapshot).row;
1627 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1628 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1629
1630 if !entry.first_line_is_new {
1631 let old_row = position.to_point(&request.before_edit).row;
1632 old_to_new_rows.insert(old_row, new_row);
1633 }
1634 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1635 }
1636
1637 // Build a map containing the suggested indentation for each of the edited lines
1638 // with respect to the state of the buffer before these edits. This map is keyed
1639 // by the rows for these lines in the current state of the buffer.
1640 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1641 let old_edited_ranges =
1642 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1643 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1644 let mut language_indent_size = IndentSize::default();
1645 for old_edited_range in old_edited_ranges {
1646 let suggestions = request
1647 .before_edit
1648 .suggest_autoindents(old_edited_range.clone())
1649 .into_iter()
1650 .flatten();
1651 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1652 if let Some(suggestion) = suggestion {
1653 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1654
1655 // Find the indent size based on the language for this row.
1656 while let Some((row, size)) = language_indent_sizes.peek() {
1657 if *row > new_row {
1658 break;
1659 }
1660 language_indent_size = *size;
1661 language_indent_sizes.next();
1662 }
1663
1664 let suggested_indent = old_to_new_rows
1665 .get(&suggestion.basis_row)
1666 .and_then(|from_row| {
1667 Some(old_suggestions.get(from_row).copied()?.0)
1668 })
1669 .unwrap_or_else(|| {
1670 request
1671 .before_edit
1672 .indent_size_for_line(suggestion.basis_row)
1673 })
1674 .with_delta(suggestion.delta, language_indent_size);
1675 old_suggestions
1676 .insert(new_row, (suggested_indent, suggestion.within_error));
1677 }
1678 }
1679 yield_now().await;
1680 }
1681
1682 // Compute new suggestions for each line, but only include them in the result
1683 // if they differ from the old suggestion for that line.
1684 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1685 let mut language_indent_size = IndentSize::default();
1686 for (row_range, original_indent_column) in row_ranges {
1687 let new_edited_row_range = if request.is_block_mode {
1688 row_range.start..row_range.start + 1
1689 } else {
1690 row_range.clone()
1691 };
1692
1693 let suggestions = snapshot
1694 .suggest_autoindents(new_edited_row_range.clone())
1695 .into_iter()
1696 .flatten();
1697 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1698 if let Some(suggestion) = suggestion {
1699 // Find the indent size based on the language for this row.
1700 while let Some((row, size)) = language_indent_sizes.peek() {
1701 if *row > new_row {
1702 break;
1703 }
1704 language_indent_size = *size;
1705 language_indent_sizes.next();
1706 }
1707
1708 let suggested_indent = indent_sizes
1709 .get(&suggestion.basis_row)
1710 .copied()
1711 .map(|e| e.0)
1712 .unwrap_or_else(|| {
1713 snapshot.indent_size_for_line(suggestion.basis_row)
1714 })
1715 .with_delta(suggestion.delta, language_indent_size);
1716
1717 if old_suggestions.get(&new_row).map_or(
1718 true,
1719 |(old_indentation, was_within_error)| {
1720 suggested_indent != *old_indentation
1721 && (!suggestion.within_error || *was_within_error)
1722 },
1723 ) {
1724 indent_sizes.insert(
1725 new_row,
1726 (suggested_indent, request.ignore_empty_lines),
1727 );
1728 }
1729 }
1730 }
1731
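                    // In block mode, only the first row of each range was re-indented above.
                    // Shift the remaining rows of the block by the same delta that was applied
                    // to the first row (relative to `original_indent_column`), so the block's
                    // internal relative indentation is preserved.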
1732 if let (true, Some(original_indent_column)) =
1733 (request.is_block_mode, original_indent_column)
1734 {
1735 let new_indent =
1736 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1737 *indent
1738 } else {
1739 snapshot.indent_size_for_line(row_range.start)
1740 };
1741 let delta = new_indent.len as i64 - original_indent_column as i64;
1742 if delta != 0 {
1743 for row in row_range.skip(1) {
1744 indent_sizes.entry(row).or_insert_with(|| {
1745 let mut size = snapshot.indent_size_for_line(row);
1746 if size.kind == new_indent.kind {
1747 match delta.cmp(&0) {
1748 Ordering::Greater => size.len += delta as u32,
1749 Ordering::Less => {
1750 size.len = size.len.saturating_sub(-delta as u32)
1751 }
1752 Ordering::Equal => {}
1753 }
1754 }
1755 (size, request.ignore_empty_lines)
1756 });
1757 }
1758 }
1759 }
1760
1761 yield_now().await;
1762 }
1763 }
1764
1765 indent_sizes
1766 .into_iter()
1767 .filter_map(|(row, (indent, ignore_empty_lines))| {
1768 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1769 None
1770 } else {
1771 Some((row, indent))
1772 }
1773 })
1774 .collect()
1775 })
1776 }
1777
1778 fn apply_autoindents(
1779 &mut self,
1780 indent_sizes: BTreeMap<u32, IndentSize>,
1781 cx: &mut Context<Self>,
1782 ) {
1783 self.autoindent_requests.clear();
1784
1785 let edits: Vec<_> = indent_sizes
1786 .into_iter()
1787 .filter_map(|(row, indent_size)| {
1788 let current_size = indent_size_for_line(self, row);
1789 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1790 })
1791 .collect();
1792
1793 let preserve_preview = self.preserve_preview();
1794 self.edit(edits, None, cx);
1795 if preserve_preview {
1796 self.refresh_preview();
1797 }
1798 }
1799
1800 /// Create a minimal edit that will cause the given row to be indented
1801 /// with the given size. After applying this edit, the length of the line
1802 /// will always be at least `new_size.len`.
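    ///
    /// A minimal illustration (not taken from the test suite), using the
    /// `IndentSize::spaces` constructor:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```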
1803 pub fn edit_for_indent_size_adjustment(
1804 row: u32,
1805 current_size: IndentSize,
1806 new_size: IndentSize,
1807 ) -> Option<(Range<Point>, String)> {
1808 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1810 Ordering::Greater => {
1811 let point = Point::new(row, 0);
1812 Some((
1813 point..point,
1814 iter::repeat(new_size.char())
1815 .take((new_size.len - current_size.len) as usize)
1816 .collect::<String>(),
1817 ))
1818 }
1819
1820 Ordering::Less => Some((
1821 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1822 String::new(),
1823 )),
1824
1825 Ordering::Equal => None,
1826 }
1827 } else {
1828 Some((
1829 Point::new(row, 0)..Point::new(row, current_size.len),
1830 iter::repeat(new_size.char())
1831 .take(new_size.len as usize)
1832 .collect::<String>(),
1833 ))
1834 }
1835 }
1836
1837 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1838 /// and the given new text.
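    ///
    /// A rough usage sketch, assuming `buffer` is an `Entity<Buffer>`; the resulting
    /// [`Diff`] is typically passed back to [`Buffer::apply_diff`]:
    ///
    /// ```ignore
    /// let diff_task = buffer.read(cx).diff(new_contents, cx);
    /// // Later, back on the foreground:
    /// let diff = diff_task.await;
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```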
1839 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1840 let old_text = self.as_rope().clone();
1841 let base_version = self.version();
1842 cx.background_executor()
1843 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1844 let old_text = old_text.to_string();
1845 let line_ending = LineEnding::detect(&new_text);
1846 LineEnding::normalize(&mut new_text);
1847 let edits = text_diff(&old_text, &new_text);
1848 Diff {
1849 base_version,
1850 line_ending,
1851 edits,
1852 }
1853 })
1854 }
1855
1856 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1858 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1859 let old_text = self.as_rope().clone();
1860 let line_ending = self.line_ending();
1861 let base_version = self.version();
1862 cx.background_spawn(async move {
1863 let ranges = trailing_whitespace_ranges(&old_text);
1864 let empty = Arc::<str>::from("");
1865 Diff {
1866 base_version,
1867 line_ending,
1868 edits: ranges
1869 .into_iter()
1870 .map(|range| (range, empty.clone()))
1871 .collect(),
1872 }
1873 })
1874 }
1875
1876 /// Ensures that the buffer ends with a single newline character, and
1877 /// no other whitespace.
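    ///
    /// Illustration (not from the test suite):
    ///
    /// ```ignore
    /// // A buffer containing "fn main() {}  \n\n" ends up containing "fn main() {}\n".
    /// buffer.ensure_final_newline(cx);
    /// ```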
1878 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1879 let len = self.len();
1880 let mut offset = len;
1881 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1882 let non_whitespace_len = chunk
1883 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1884 .len();
1885 offset -= chunk.len();
1886 offset += non_whitespace_len;
1887 if non_whitespace_len != 0 {
1888 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1889 return;
1890 }
1891 break;
1892 }
1893 }
1894 self.edit([(offset..len, "\n")], None, cx);
1895 }
1896
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
1900 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1901 let snapshot = self.snapshot();
1902 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1903 let mut delta = 0;
1904 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1905 while let Some(edit_since) = edits_since.peek() {
1906 // If the edit occurs after a diff hunk, then it does not
1907 // affect that hunk.
1908 if edit_since.old.start > range.end {
1909 break;
1910 }
1911 // If the edit precedes the diff hunk, then adjust the hunk
1912 // to reflect the edit.
1913 else if edit_since.old.end < range.start {
1914 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1915 edits_since.next();
1916 }
1917 // If the edit intersects a diff hunk, then discard that hunk.
1918 else {
1919 return None;
1920 }
1921 }
1922
1923 let start = (range.start as i64 + delta) as usize;
1924 let end = (range.end as i64 + delta) as usize;
1925 Some((start..end, new_text))
1926 });
1927
1928 self.start_transaction();
1929 self.text.set_line_ending(diff.line_ending);
1930 self.edit(adjusted_edits, None, cx);
1931 self.end_transaction(cx)
1932 }
1933
1934 fn has_unsaved_edits(&self) -> bool {
1935 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1936
1937 if last_version == self.version {
1938 self.has_unsaved_edits
1939 .set((last_version, has_unsaved_edits));
1940 return has_unsaved_edits;
1941 }
1942
1943 let has_edits = self.has_edits_since(&self.saved_version);
1944 self.has_unsaved_edits
1945 .set((self.version.clone(), has_edits));
1946 has_edits
1947 }
1948
1949 /// Checks if the buffer has unsaved changes.
1950 pub fn is_dirty(&self) -> bool {
1951 if self.capability == Capability::ReadOnly {
1952 return false;
1953 }
1954 if self.has_conflict {
1955 return true;
1956 }
1957 match self.file.as_ref().map(|f| f.disk_state()) {
1958 Some(DiskState::New) | Some(DiskState::Deleted) => {
1959 !self.is_empty() && self.has_unsaved_edits()
1960 }
1961 _ => self.has_unsaved_edits(),
1962 }
1963 }
1964
1965 /// Checks if the buffer and its file have both changed since the buffer
1966 /// was last saved or reloaded.
1967 pub fn has_conflict(&self) -> bool {
1968 if self.has_conflict {
1969 return true;
1970 }
1971 let Some(file) = self.file.as_ref() else {
1972 return false;
1973 };
1974 match file.disk_state() {
1975 DiskState::New => false,
1976 DiskState::Present { mtime } => match self.saved_mtime {
1977 Some(saved_mtime) => {
1978 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1979 }
1980 None => true,
1981 },
1982 DiskState::Deleted => false,
1983 }
1984 }
1985
1986 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1987 pub fn subscribe(&mut self) -> Subscription {
1988 self.text.subscribe()
1989 }
1990
1991 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1992 ///
1993 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
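    ///
    /// A minimal sketch of the intended usage (names are illustrative):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after any subsequent edit to the buffer:
    /// assert!(changed.get());
    /// ```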
1995 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1996 if let Err(ix) = self
1997 .change_bits
1998 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1999 {
2000 self.change_bits.insert(ix, bit);
2001 }
2002 }
2003
2004 fn was_changed(&mut self) {
2005 self.change_bits.retain(|change_bit| {
2006 change_bit.upgrade().map_or(false, |bit| {
2007 bit.replace(true);
2008 true
2009 })
2010 });
2011 }
2012
2013 /// Starts a transaction, if one is not already in-progress. When undoing or
2014 /// redoing edits, all of the edits performed within a transaction are undone
2015 /// or redone together.
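    ///
    /// A sketch of how transactions group edits for undo (assuming an existing `cx`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "a")], None, cx);
    /// buffer.edit([(1..1, "b")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```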
2016 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2017 self.start_transaction_at(Instant::now())
2018 }
2019
2020 /// Starts a transaction, providing the current time. Subsequent transactions
2021 /// that occur within a short period of time will be grouped together. This
2022 /// is controlled by the buffer's undo grouping duration.
2023 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2024 self.transaction_depth += 1;
2025 if self.was_dirty_before_starting_transaction.is_none() {
2026 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2027 }
2028 self.text.start_transaction_at(now)
2029 }
2030
2031 /// Terminates the current transaction, if this is the outermost transaction.
2032 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2033 self.end_transaction_at(Instant::now(), cx)
2034 }
2035
2036 /// Terminates the current transaction, providing the current time. Subsequent transactions
2037 /// that occur within a short period of time will be grouped together. This
2038 /// is controlled by the buffer's undo grouping duration.
2039 pub fn end_transaction_at(
2040 &mut self,
2041 now: Instant,
2042 cx: &mut Context<Self>,
2043 ) -> Option<TransactionId> {
2044 assert!(self.transaction_depth > 0);
2045 self.transaction_depth -= 1;
2046 let was_dirty = if self.transaction_depth == 0 {
2047 self.was_dirty_before_starting_transaction.take().unwrap()
2048 } else {
2049 false
2050 };
2051 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2052 self.did_edit(&start_version, was_dirty, cx);
2053 Some(transaction_id)
2054 } else {
2055 None
2056 }
2057 }
2058
2059 /// Manually add a transaction to the buffer's undo history.
2060 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2061 self.text.push_transaction(transaction, now);
2062 }
2063
2064 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2066 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2067 self.text.finalize_last_transaction()
2068 }
2069
2070 /// Manually group all changes since a given transaction.
2071 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2072 self.text.group_until_transaction(transaction_id);
2073 }
2074
    /// Manually remove a transaction from the buffer's undo history.
2076 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2077 self.text.forget_transaction(transaction_id)
2078 }
2079
    /// Retrieve a transaction from the buffer's undo history.
2081 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2082 self.text.get_transaction(transaction_id)
2083 }
2084
2085 /// Manually merge two transactions in the buffer's undo history.
2086 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2087 self.text.merge_transactions(transaction, destination);
2088 }
2089
2090 /// Waits for the buffer to receive operations with the given timestamps.
2091 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2092 &mut self,
2093 edit_ids: It,
2094 ) -> impl Future<Output = Result<()>> + use<It> {
2095 self.text.wait_for_edits(edit_ids)
2096 }
2097
2098 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2099 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2100 &mut self,
2101 anchors: It,
2102 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2103 self.text.wait_for_anchors(anchors)
2104 }
2105
2106 /// Waits for the buffer to receive operations up to the given version.
2107 pub fn wait_for_version(
2108 &mut self,
2109 version: clock::Global,
2110 ) -> impl Future<Output = Result<()>> + use<> {
2111 self.text.wait_for_version(version)
2112 }
2113
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2116 pub fn give_up_waiting(&mut self) {
2117 self.text.give_up_waiting();
2118 }
2119
    /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2121 pub fn set_active_selections(
2122 &mut self,
2123 selections: Arc<[Selection<Anchor>]>,
2124 line_mode: bool,
2125 cursor_shape: CursorShape,
2126 cx: &mut Context<Self>,
2127 ) {
2128 let lamport_timestamp = self.text.lamport_clock.tick();
2129 self.remote_selections.insert(
2130 self.text.replica_id(),
2131 SelectionSet {
2132 selections: selections.clone(),
2133 lamport_timestamp,
2134 line_mode,
2135 cursor_shape,
2136 },
2137 );
2138 self.send_operation(
2139 Operation::UpdateSelections {
2140 selections,
2141 line_mode,
2142 lamport_timestamp,
2143 cursor_shape,
2144 },
2145 true,
2146 cx,
2147 );
2148 self.non_text_state_update_count += 1;
2149 cx.notify();
2150 }
2151
2152 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2153 /// this replica.
2154 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2155 if self
2156 .remote_selections
2157 .get(&self.text.replica_id())
2158 .map_or(true, |set| !set.selections.is_empty())
2159 {
2160 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2161 }
2162 }
2163
2164 pub fn set_agent_selections(
2165 &mut self,
2166 selections: Arc<[Selection<Anchor>]>,
2167 line_mode: bool,
2168 cursor_shape: CursorShape,
2169 cx: &mut Context<Self>,
2170 ) {
2171 let lamport_timestamp = self.text.lamport_clock.tick();
2172 self.remote_selections.insert(
2173 AGENT_REPLICA_ID,
2174 SelectionSet {
2175 selections: selections.clone(),
2176 lamport_timestamp,
2177 line_mode,
2178 cursor_shape,
2179 },
2180 );
2181 self.non_text_state_update_count += 1;
2182 cx.notify();
2183 }
2184
2185 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2186 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2187 }
2188
2189 /// Replaces the buffer's entire text.
2190 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2191 where
2192 T: Into<Arc<str>>,
2193 {
2194 self.autoindent_requests.clear();
2195 self.edit([(0..self.len(), text)], None, cx)
2196 }
2197
2198 /// Appends the given text to the end of the buffer.
2199 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2200 where
2201 T: Into<Arc<str>>,
2202 {
2203 self.edit([(self.len()..self.len(), text)], None, cx)
2204 }
2205
2206 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2207 /// delete, and a string of text to insert at that location.
2208 ///
2209 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2210 /// request for the edited ranges, which will be processed when the buffer finishes
2211 /// parsing.
2212 ///
    /// Parsing takes place at the end of a transaction, and may be performed synchronously
    /// or asynchronously, depending on the changes.
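    ///
    /// A small sketch (the range and text are illustrative):
    ///
    /// ```ignore
    /// // Replace a range and auto-indent each affected line.
    /// buffer.edit(
    ///     [(range, "if x {\n    y();\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```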
2215 pub fn edit<I, S, T>(
2216 &mut self,
2217 edits_iter: I,
2218 autoindent_mode: Option<AutoindentMode>,
2219 cx: &mut Context<Self>,
2220 ) -> Option<clock::Lamport>
2221 where
2222 I: IntoIterator<Item = (Range<S>, T)>,
2223 S: ToOffset,
2224 T: Into<Arc<str>>,
2225 {
2226 // Skip invalid edits and coalesce contiguous ones.
2227 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2228
2229 for (range, new_text) in edits_iter {
2230 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2231
2232 if range.start > range.end {
2233 mem::swap(&mut range.start, &mut range.end);
2234 }
2235 let new_text = new_text.into();
2236 if !new_text.is_empty() || !range.is_empty() {
2237 if let Some((prev_range, prev_text)) = edits.last_mut() {
2238 if prev_range.end >= range.start {
2239 prev_range.end = cmp::max(prev_range.end, range.end);
2240 *prev_text = format!("{prev_text}{new_text}").into();
2241 } else {
2242 edits.push((range, new_text));
2243 }
2244 } else {
2245 edits.push((range, new_text));
2246 }
2247 }
2248 }
2249 if edits.is_empty() {
2250 return None;
2251 }
2252
2253 self.start_transaction();
2254 self.pending_autoindent.take();
2255 let autoindent_request = autoindent_mode
2256 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2257
2258 let edit_operation = self.text.edit(edits.iter().cloned());
2259 let edit_id = edit_operation.timestamp();
2260
2261 if let Some((before_edit, mode)) = autoindent_request {
2262 let mut delta = 0isize;
2263 let entries = edits
2264 .into_iter()
2265 .enumerate()
2266 .zip(&edit_operation.as_edit().unwrap().new_text)
2267 .map(|((ix, (range, _)), new_text)| {
2268 let new_text_length = new_text.len();
2269 let old_start = range.start.to_point(&before_edit);
2270 let new_start = (delta + range.start as isize) as usize;
2271 let range_len = range.end - range.start;
2272 delta += new_text_length as isize - range_len as isize;
2273
2274 // Decide what range of the insertion to auto-indent, and whether
2275 // the first line of the insertion should be considered a newly-inserted line
2276 // or an edit to an existing line.
2277 let mut range_of_insertion_to_indent = 0..new_text_length;
2278 let mut first_line_is_new = true;
2279
2280 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2281 let old_line_end = before_edit.line_len(old_start.row);
2282
2283 if old_start.column > old_line_start {
2284 first_line_is_new = false;
2285 }
2286
2287 if !new_text.contains('\n')
2288 && (old_start.column + (range_len as u32) < old_line_end
2289 || old_line_end == old_line_start)
2290 {
2291 first_line_is_new = false;
2292 }
2293
2294 // When inserting text starting with a newline, avoid auto-indenting the
2295 // previous line.
2296 if new_text.starts_with('\n') {
2297 range_of_insertion_to_indent.start += 1;
2298 first_line_is_new = true;
2299 }
2300
2301 let mut original_indent_column = None;
2302 if let AutoindentMode::Block {
2303 original_indent_columns,
2304 } = &mode
2305 {
2306 original_indent_column = Some(if new_text.starts_with('\n') {
2307 indent_size_for_text(
2308 new_text[range_of_insertion_to_indent.clone()].chars(),
2309 )
2310 .len
2311 } else {
2312 original_indent_columns
2313 .get(ix)
2314 .copied()
2315 .flatten()
2316 .unwrap_or_else(|| {
2317 indent_size_for_text(
2318 new_text[range_of_insertion_to_indent.clone()].chars(),
2319 )
2320 .len
2321 })
2322 });
2323
2324 // Avoid auto-indenting the line after the edit.
2325 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2326 range_of_insertion_to_indent.end -= 1;
2327 }
2328 }
2329
2330 AutoindentRequestEntry {
2331 first_line_is_new,
2332 original_indent_column,
2333 indent_size: before_edit.language_indent_size_at(range.start, cx),
2334 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2335 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2336 }
2337 })
2338 .collect();
2339
2340 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2341 before_edit,
2342 entries,
2343 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2344 ignore_empty_lines: false,
2345 }));
2346 }
2347
2348 self.end_transaction(cx);
2349 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2350 Some(edit_id)
2351 }
2352
2353 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2354 self.was_changed();
2355
2356 if self.edits_since::<usize>(old_version).next().is_none() {
2357 return;
2358 }
2359
2360 self.reparse(cx);
2361 cx.emit(BufferEvent::Edited);
2362 if was_dirty != self.is_dirty() {
2363 cx.emit(BufferEvent::DirtyChanged);
2364 }
2365 cx.notify();
2366 }
2367
2368 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2369 where
2370 I: IntoIterator<Item = Range<T>>,
2371 T: ToOffset + Copy,
2372 {
2373 let before_edit = self.snapshot();
2374 let entries = ranges
2375 .into_iter()
2376 .map(|range| AutoindentRequestEntry {
2377 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2378 first_line_is_new: true,
2379 indent_size: before_edit.language_indent_size_at(range.start, cx),
2380 original_indent_column: None,
2381 })
2382 .collect();
2383 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2384 before_edit,
2385 entries,
2386 is_block_mode: false,
2387 ignore_empty_lines: true,
2388 }));
2389 self.request_autoindent(cx);
2390 }
2391
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2394 pub fn insert_empty_line(
2395 &mut self,
2396 position: impl ToPoint,
2397 space_above: bool,
2398 space_below: bool,
2399 cx: &mut Context<Self>,
2400 ) -> Point {
2401 let mut position = position.to_point(self);
2402
2403 self.start_transaction();
2404
2405 self.edit(
2406 [(position..position, "\n")],
2407 Some(AutoindentMode::EachLine),
2408 cx,
2409 );
2410
2411 if position.column > 0 {
2412 position += Point::new(1, 0);
2413 }
2414
2415 if !self.is_line_blank(position.row) {
2416 self.edit(
2417 [(position..position, "\n")],
2418 Some(AutoindentMode::EachLine),
2419 cx,
2420 );
2421 }
2422
2423 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2424 self.edit(
2425 [(position..position, "\n")],
2426 Some(AutoindentMode::EachLine),
2427 cx,
2428 );
2429 position.row += 1;
2430 }
2431
2432 if space_below
2433 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2434 {
2435 self.edit(
2436 [(position..position, "\n")],
2437 Some(AutoindentMode::EachLine),
2438 cx,
2439 );
2440 }
2441
2442 self.end_transaction(cx);
2443
2444 position
2445 }
2446
2447 /// Applies the given remote operations to the buffer.
2448 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2449 self.pending_autoindent.take();
2450 let was_dirty = self.is_dirty();
2451 let old_version = self.version.clone();
2452 let mut deferred_ops = Vec::new();
2453 let buffer_ops = ops
2454 .into_iter()
2455 .filter_map(|op| match op {
2456 Operation::Buffer(op) => Some(op),
2457 _ => {
2458 if self.can_apply_op(&op) {
2459 self.apply_op(op, cx);
2460 } else {
2461 deferred_ops.push(op);
2462 }
2463 None
2464 }
2465 })
2466 .collect::<Vec<_>>();
2467 for operation in buffer_ops.iter() {
2468 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2469 }
2470 self.text.apply_ops(buffer_ops);
2471 self.deferred_ops.insert(deferred_ops);
2472 self.flush_deferred_ops(cx);
2473 self.did_edit(&old_version, was_dirty, cx);
        // Notify regardless of whether the buffer was edited, as the operations could include a
        // selection update.
2476 cx.notify();
2477 }
2478
2479 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2480 let mut deferred_ops = Vec::new();
2481 for op in self.deferred_ops.drain().iter().cloned() {
2482 if self.can_apply_op(&op) {
2483 self.apply_op(op, cx);
2484 } else {
2485 deferred_ops.push(op);
2486 }
2487 }
2488 self.deferred_ops.insert(deferred_ops);
2489 }
2490
2491 pub fn has_deferred_ops(&self) -> bool {
2492 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2493 }
2494
2495 fn can_apply_op(&self, operation: &Operation) -> bool {
2496 match operation {
2497 Operation::Buffer(_) => {
2498 unreachable!("buffer operations should never be applied at this layer")
2499 }
2500 Operation::UpdateDiagnostics {
2501 diagnostics: diagnostic_set,
2502 ..
2503 } => diagnostic_set.iter().all(|diagnostic| {
2504 self.text.can_resolve(&diagnostic.range.start)
2505 && self.text.can_resolve(&diagnostic.range.end)
2506 }),
2507 Operation::UpdateSelections { selections, .. } => selections
2508 .iter()
2509 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2510 Operation::UpdateCompletionTriggers { .. } => true,
2511 }
2512 }
2513
2514 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2515 match operation {
2516 Operation::Buffer(_) => {
2517 unreachable!("buffer operations should never be applied at this layer")
2518 }
2519 Operation::UpdateDiagnostics {
2520 server_id,
2521 diagnostics: diagnostic_set,
2522 lamport_timestamp,
2523 } => {
2524 let snapshot = self.snapshot();
2525 self.apply_diagnostic_update(
2526 server_id,
2527 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2528 lamport_timestamp,
2529 cx,
2530 );
2531 }
2532 Operation::UpdateSelections {
2533 selections,
2534 lamport_timestamp,
2535 line_mode,
2536 cursor_shape,
2537 } => {
2538 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2539 if set.lamport_timestamp > lamport_timestamp {
2540 return;
2541 }
2542 }
2543
2544 self.remote_selections.insert(
2545 lamport_timestamp.replica_id,
2546 SelectionSet {
2547 selections,
2548 lamport_timestamp,
2549 line_mode,
2550 cursor_shape,
2551 },
2552 );
2553 self.text.lamport_clock.observe(lamport_timestamp);
2554 self.non_text_state_update_count += 1;
2555 }
2556 Operation::UpdateCompletionTriggers {
2557 triggers,
2558 lamport_timestamp,
2559 server_id,
2560 } => {
2561 if triggers.is_empty() {
2562 self.completion_triggers_per_language_server
2563 .remove(&server_id);
2564 self.completion_triggers = self
2565 .completion_triggers_per_language_server
2566 .values()
2567 .flat_map(|triggers| triggers.into_iter().cloned())
2568 .collect();
2569 } else {
2570 self.completion_triggers_per_language_server
2571 .insert(server_id, triggers.iter().cloned().collect());
2572 self.completion_triggers.extend(triggers);
2573 }
2574 self.text.lamport_clock.observe(lamport_timestamp);
2575 }
2576 }
2577 }
2578
2579 fn apply_diagnostic_update(
2580 &mut self,
2581 server_id: LanguageServerId,
2582 diagnostics: DiagnosticSet,
2583 lamport_timestamp: clock::Lamport,
2584 cx: &mut Context<Self>,
2585 ) {
2586 if lamport_timestamp > self.diagnostics_timestamp {
2587 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2588 if diagnostics.is_empty() {
2589 if let Ok(ix) = ix {
2590 self.diagnostics.remove(ix);
2591 }
2592 } else {
2593 match ix {
2594 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2595 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2596 };
2597 }
2598 self.diagnostics_timestamp = lamport_timestamp;
2599 self.non_text_state_update_count += 1;
2600 self.text.lamport_clock.observe(lamport_timestamp);
2601 cx.notify();
2602 cx.emit(BufferEvent::DiagnosticsUpdated);
2603 }
2604 }
2605
2606 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2607 self.was_changed();
2608 cx.emit(BufferEvent::Operation {
2609 operation,
2610 is_local,
2611 });
2612 }
2613
2614 /// Removes the selections for a given peer.
2615 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2616 self.remote_selections.remove(&replica_id);
2617 cx.notify();
2618 }
2619
2620 /// Undoes the most recent transaction.
2621 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2622 let was_dirty = self.is_dirty();
2623 let old_version = self.version.clone();
2624
2625 if let Some((transaction_id, operation)) = self.text.undo() {
2626 self.send_operation(Operation::Buffer(operation), true, cx);
2627 self.did_edit(&old_version, was_dirty, cx);
2628 Some(transaction_id)
2629 } else {
2630 None
2631 }
2632 }
2633
2634 /// Manually undoes a specific transaction in the buffer's undo history.
2635 pub fn undo_transaction(
2636 &mut self,
2637 transaction_id: TransactionId,
2638 cx: &mut Context<Self>,
2639 ) -> bool {
2640 let was_dirty = self.is_dirty();
2641 let old_version = self.version.clone();
2642 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2643 self.send_operation(Operation::Buffer(operation), true, cx);
2644 self.did_edit(&old_version, was_dirty, cx);
2645 true
2646 } else {
2647 false
2648 }
2649 }
2650
2651 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2652 pub fn undo_to_transaction(
2653 &mut self,
2654 transaction_id: TransactionId,
2655 cx: &mut Context<Self>,
2656 ) -> bool {
2657 let was_dirty = self.is_dirty();
2658 let old_version = self.version.clone();
2659
2660 let operations = self.text.undo_to_transaction(transaction_id);
2661 let undone = !operations.is_empty();
2662 for operation in operations {
2663 self.send_operation(Operation::Buffer(operation), true, cx);
2664 }
2665 if undone {
2666 self.did_edit(&old_version, was_dirty, cx)
2667 }
2668 undone
2669 }
2670
2671 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2672 let was_dirty = self.is_dirty();
2673 let operation = self.text.undo_operations(counts);
2674 let old_version = self.version.clone();
2675 self.send_operation(Operation::Buffer(operation), true, cx);
2676 self.did_edit(&old_version, was_dirty, cx);
2677 }
2678
    /// Redoes the most recent transaction.
2680 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2681 let was_dirty = self.is_dirty();
2682 let old_version = self.version.clone();
2683
2684 if let Some((transaction_id, operation)) = self.text.redo() {
2685 self.send_operation(Operation::Buffer(operation), true, cx);
2686 self.did_edit(&old_version, was_dirty, cx);
2687 Some(transaction_id)
2688 } else {
2689 None
2690 }
2691 }
2692
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2694 pub fn redo_to_transaction(
2695 &mut self,
2696 transaction_id: TransactionId,
2697 cx: &mut Context<Self>,
2698 ) -> bool {
2699 let was_dirty = self.is_dirty();
2700 let old_version = self.version.clone();
2701
2702 let operations = self.text.redo_to_transaction(transaction_id);
2703 let redone = !operations.is_empty();
2704 for operation in operations {
2705 self.send_operation(Operation::Buffer(operation), true, cx);
2706 }
2707 if redone {
2708 self.did_edit(&old_version, was_dirty, cx)
2709 }
2710 redone
2711 }
2712
2713 /// Override current completion triggers with the user-provided completion triggers.
2714 pub fn set_completion_triggers(
2715 &mut self,
2716 server_id: LanguageServerId,
2717 triggers: BTreeSet<String>,
2718 cx: &mut Context<Self>,
2719 ) {
2720 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2721 if triggers.is_empty() {
2722 self.completion_triggers_per_language_server
2723 .remove(&server_id);
2724 self.completion_triggers = self
2725 .completion_triggers_per_language_server
2726 .values()
2727 .flat_map(|triggers| triggers.into_iter().cloned())
2728 .collect();
2729 } else {
2730 self.completion_triggers_per_language_server
2731 .insert(server_id, triggers.clone());
2732 self.completion_triggers.extend(triggers.iter().cloned());
2733 }
2734 self.send_operation(
2735 Operation::UpdateCompletionTriggers {
2736 triggers: triggers.into_iter().collect(),
2737 lamport_timestamp: self.completion_triggers_timestamp,
2738 server_id,
2739 },
2740 true,
2741 cx,
2742 );
2743 cx.notify();
2744 }
2745
2746 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2748 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2749 &self.completion_triggers
2750 }
2751
2752 /// Call this directly after performing edits to prevent the preview tab
2753 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2754 /// to return false until there are additional edits.
2755 pub fn refresh_preview(&mut self) {
2756 self.preview_version = self.version.clone();
2757 }
2758
2759 /// Whether we should preserve the preview status of a tab containing this buffer.
2760 pub fn preserve_preview(&self) -> bool {
2761 !self.has_edits_since(&self.preview_version)
2762 }
2763}
2764
2765#[doc(hidden)]
2766#[cfg(any(test, feature = "test-support"))]
2767impl Buffer {
2768 pub fn edit_via_marked_text(
2769 &mut self,
2770 marked_string: &str,
2771 autoindent_mode: Option<AutoindentMode>,
2772 cx: &mut Context<Self>,
2773 ) {
2774 let edits = self.edits_for_marked_text(marked_string);
2775 self.edit(edits, autoindent_mode, cx);
2776 }
2777
2778 pub fn set_group_interval(&mut self, group_interval: Duration) {
2779 self.text.set_group_interval(group_interval);
2780 }
2781
2782 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2783 where
2784 T: rand::Rng,
2785 {
2786 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2787 let mut last_end = None;
2788 for _ in 0..old_range_count {
2789 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2790 break;
2791 }
2792
2793 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2794 let mut range = self.random_byte_range(new_start, rng);
2795 if rng.gen_bool(0.2) {
2796 mem::swap(&mut range.start, &mut range.end);
2797 }
2798 last_end = Some(range.end);
2799
2800 let new_text_len = rng.gen_range(0..10);
2801 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2802 new_text = new_text.to_uppercase();
2803
2804 edits.push((range, new_text));
2805 }
2806 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2807 self.edit(edits, None, cx);
2808 }
2809
2810 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2811 let was_dirty = self.is_dirty();
2812 let old_version = self.version.clone();
2813
2814 let ops = self.text.randomly_undo_redo(rng);
2815 if !ops.is_empty() {
2816 for op in ops {
2817 self.send_operation(Operation::Buffer(op), true, cx);
2818 self.did_edit(&old_version, was_dirty, cx);
2819 }
2820 }
2821 }
2822}
2823
2824impl EventEmitter<BufferEvent> for Buffer {}
2825
2826impl Deref for Buffer {
2827 type Target = TextBuffer;
2828
2829 fn deref(&self) -> &Self::Target {
2830 &self.text
2831 }
2832}
2833
2834impl BufferSnapshot {
2835 /// Returns [`IndentSize`] for a given line that respects user settings and
2836 /// language preferences.
2837 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2838 indent_size_for_line(self, row)
2839 }
2840
2841 /// Returns [`IndentSize`] for a given position that respects user settings
2842 /// and language preferences.
2843 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2844 let settings = language_settings(
2845 self.language_at(position).map(|l| l.name()),
2846 self.file(),
2847 cx,
2848 );
2849 if settings.hard_tabs {
2850 IndentSize::tab()
2851 } else {
2852 IndentSize::spaces(settings.tab_size.get())
2853 }
2854 }
2855
2856 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2857 /// is passed in as `single_indent_size`.
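    ///
    /// For example (illustrative), to compute suggestions for the first ten rows using a
    /// four-space indent unit:
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// ```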
2858 pub fn suggested_indents(
2859 &self,
2860 rows: impl Iterator<Item = u32>,
2861 single_indent_size: IndentSize,
2862 ) -> BTreeMap<u32, IndentSize> {
2863 let mut result = BTreeMap::new();
2864
2865 for row_range in contiguous_ranges(rows, 10) {
2866 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2867 Some(suggestions) => suggestions,
2868 _ => break,
2869 };
2870
2871 for (row, suggestion) in row_range.zip(suggestions) {
2872 let indent_size = if let Some(suggestion) = suggestion {
2873 result
2874 .get(&suggestion.basis_row)
2875 .copied()
2876 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2877 .with_delta(suggestion.delta, single_indent_size)
2878 } else {
2879 self.indent_size_for_line(row)
2880 };
2881
2882 result.insert(row, indent_size);
2883 }
2884 }
2885
2886 result
2887 }
2888
2889 fn suggest_autoindents(
2890 &self,
2891 row_range: Range<u32>,
2892 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2893 let config = &self.language.as_ref()?.config;
2894 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2895 let significant_indentation = config.significant_indentation;
2896
2897 // Find the suggested indentation ranges based on the syntax tree.
2898 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2899 let end = Point::new(row_range.end, 0);
2900 let range = (start..end).to_offset(&self.text);
2901 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2902 Some(&grammar.indents_config.as_ref()?.query)
2903 });
2904 let indent_configs = matches
2905 .grammars()
2906 .iter()
2907 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2908 .collect::<Vec<_>>();
2909
2910 let mut indent_ranges = Vec::<Range<Point>>::new();
2911 let mut outdent_positions = Vec::<Point>::new();
2912 while let Some(mat) = matches.peek() {
2913 let mut start: Option<Point> = None;
2914 let mut end: Option<Point> = None;
2915 let mut outdent: Option<Point> = None;
2916
2917 let config = &indent_configs[mat.grammar_index];
2918 for capture in mat.captures {
2919 if capture.index == config.indent_capture_ix {
2920 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2921 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2922 } else if Some(capture.index) == config.start_capture_ix {
2923 start = Some(Point::from_ts_point(capture.node.end_position()));
2924 } else if Some(capture.index) == config.end_capture_ix {
2925 end = Some(Point::from_ts_point(capture.node.start_position()));
2926 } else if Some(capture.index) == config.outdent_capture_ix {
2927 let point = Point::from_ts_point(capture.node.start_position());
2928 outdent.get_or_insert(point);
2929 outdent_positions.push(point);
2930 }
2931 }
2932
2933 matches.advance();
            // In the case of significant indentation, expand the end to the outdent position.
2935 let end = if significant_indentation {
2936 outdent.or(end)
2937 } else {
2938 end
2939 };
2940 if let Some((start, end)) = start.zip(end) {
2941 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2942 continue;
2943 }
2944 let range = start..end;
2945 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2946 Err(ix) => indent_ranges.insert(ix, range),
2947 Ok(ix) => {
2948 let prev_range = &mut indent_ranges[ix];
2949 prev_range.end = prev_range.end.max(range.end);
2950 }
2951 }
2952 }
2953 }
2954
2955 let mut error_ranges = Vec::<Range<Point>>::new();
2956 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2957 grammar.error_query.as_ref()
2958 });
2959 while let Some(mat) = matches.peek() {
2960 let node = mat.captures[0].node;
2961 let start = Point::from_ts_point(node.start_position());
2962 let end = Point::from_ts_point(node.end_position());
2963 let range = start..end;
2964 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2965 Ok(ix) | Err(ix) => ix,
2966 };
2967 let mut end_ix = ix;
2968 while let Some(existing_range) = error_ranges.get(end_ix) {
2969 if existing_range.end < end {
2970 end_ix += 1;
2971 } else {
2972 break;
2973 }
2974 }
2975 error_ranges.splice(ix..end_ix, [range]);
2976 matches.advance();
2977 }
2978
        // We don't use outdent positions to truncate when indentation is significant;
        // rather, we use them to expand the indent ranges (handled above).
2981 if !significant_indentation {
2982 outdent_positions.sort();
2983 for outdent_position in outdent_positions {
2984 // find the innermost indent range containing this outdent_position
2985 // set its end to the outdent position
2986 if let Some(range_to_truncate) = indent_ranges
2987 .iter_mut()
2988 .filter(|indent_range| indent_range.contains(&outdent_position))
2989 .next_back()
2990 {
2991 range_to_truncate.end = outdent_position;
2992 }
2993 }
2994 }
2995
        // Find the suggested indentation increases and decreases based on regexes.
2997 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2998 self.for_each_line(
2999 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3000 ..Point::new(row_range.end, 0),
3001 |row, line| {
3002 if config
3003 .decrease_indent_pattern
3004 .as_ref()
3005 .map_or(false, |regex| regex.is_match(line))
3006 {
3007 indent_change_rows.push((row, Ordering::Less));
3008 }
3009 if config
3010 .increase_indent_pattern
3011 .as_ref()
3012 .map_or(false, |regex| regex.is_match(line))
3013 {
3014 indent_change_rows.push((row + 1, Ordering::Greater));
3015 }
3016 },
3017 );
3018
3019 let mut indent_changes = indent_change_rows.into_iter().peekable();
3020 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3021 prev_non_blank_row.unwrap_or(0)
3022 } else {
3023 row_range.start.saturating_sub(1)
3024 };
3025 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
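        // For each requested row, combine the syntax-derived indent ranges, the
        // regex-derived indent changes, and the error ranges into a single
        // `IndentSuggestion` expressed relative to a basis row.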
3026 Some(row_range.map(move |row| {
3027 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3028
3029 let mut indent_from_prev_row = false;
3030 let mut outdent_from_prev_row = false;
3031 let mut outdent_to_row = u32::MAX;
3032 let mut from_regex = false;
3033
3034 while let Some((indent_row, delta)) = indent_changes.peek() {
3035 match indent_row.cmp(&row) {
3036 Ordering::Equal => match delta {
3037 Ordering::Less => {
3038 from_regex = true;
3039 outdent_from_prev_row = true
3040 }
3041 Ordering::Greater => {
3042 indent_from_prev_row = true;
3043 from_regex = true
3044 }
3045 _ => {}
3046 },
3047
3048 Ordering::Greater => break,
3049 Ordering::Less => {}
3050 }
3051
3052 indent_changes.next();
3053 }
3054
3055 for range in &indent_ranges {
3056 if range.start.row >= row {
3057 break;
3058 }
3059 if range.start.row == prev_row && range.end > row_start {
3060 indent_from_prev_row = true;
3061 }
3062 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3063 {
3064 indent_from_prev_row = true;
3065 }
3066 if !significant_indentation || !self.is_line_blank(row) {
3067 if range.end > prev_row_start && range.end <= row_start {
3068 outdent_to_row = outdent_to_row.min(range.start.row);
3069 }
3070 }
3071 }
3072
3073 let within_error = error_ranges
3074 .iter()
3075 .any(|e| e.start.row < row && e.end > row_start);
3076
3077 let suggestion = if outdent_to_row == prev_row
3078 || (outdent_from_prev_row && indent_from_prev_row)
3079 {
3080 Some(IndentSuggestion {
3081 basis_row: prev_row,
3082 delta: Ordering::Equal,
3083 within_error: within_error && !from_regex,
3084 })
3085 } else if indent_from_prev_row {
3086 Some(IndentSuggestion {
3087 basis_row: prev_row,
3088 delta: Ordering::Greater,
3089 within_error: within_error && !from_regex,
3090 })
3091 } else if outdent_to_row < prev_row {
3092 Some(IndentSuggestion {
3093 basis_row: outdent_to_row,
3094 delta: Ordering::Equal,
3095 within_error: within_error && !from_regex,
3096 })
3097 } else if outdent_from_prev_row {
3098 Some(IndentSuggestion {
3099 basis_row: prev_row,
3100 delta: Ordering::Less,
3101 within_error: within_error && !from_regex,
3102 })
3103 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3104 {
3105 Some(IndentSuggestion {
3106 basis_row: prev_row,
3107 delta: Ordering::Equal,
3108 within_error: within_error && !from_regex,
3109 })
3110 } else {
3111 None
3112 };
3113
3114 prev_row = row;
3115 prev_row_start = row_start;
3116 suggestion
3117 }))
3118 }
3119
3120 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3121 while row > 0 {
3122 row -= 1;
3123 if !self.is_line_blank(row) {
3124 return Some(row);
3125 }
3126 }
3127 None
3128 }
3129
3130 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3131 let captures = self.syntax.captures(range, &self.text, |grammar| {
3132 grammar.highlights_query.as_ref()
3133 });
3134 let highlight_maps = captures
3135 .grammars()
3136 .iter()
3137 .map(|grammar| grammar.highlight_map())
3138 .collect();
3139 (captures, highlight_maps)
3140 }
3141
3142 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3143 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3144 /// returned in chunks where each chunk has a single syntax highlighting style and
3145 /// diagnostic status.
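    ///
    /// A minimal sketch of reassembling the text of a range (assumes the chunk item's
    /// public `text` field, as used elsewhere in this crate):
    ///
    /// ```ignore
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..snapshot.len(), true) {
    ///     text.push_str(chunk.text);
    /// }
    /// ```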
3146 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3147 let range = range.start.to_offset(self)..range.end.to_offset(self);
3148
3149 let mut syntax = None;
3150 if language_aware {
3151 syntax = Some(self.get_highlights(range.clone()));
3152 }
3153 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3154 let diagnostics = language_aware;
3155 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3156 }
3157
3158 pub fn highlighted_text_for_range<T: ToOffset>(
3159 &self,
3160 range: Range<T>,
3161 override_style: Option<HighlightStyle>,
3162 syntax_theme: &SyntaxTheme,
3163 ) -> HighlightedText {
3164 HighlightedText::from_buffer_range(
3165 range,
3166 &self.text,
3167 &self.syntax,
3168 override_style,
3169 syntax_theme,
3170 )
3171 }
3172
3173 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3175 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3176 let mut line = String::new();
3177 let mut row = range.start.row;
3178 for chunk in self
3179 .as_rope()
3180 .chunks_in_range(range.to_offset(self))
3181 .chain(["\n"])
3182 {
3183 for (newline_ix, text) in chunk.split('\n').enumerate() {
3184 if newline_ix > 0 {
3185 callback(row, &line);
3186 row += 1;
3187 line.clear();
3188 }
3189 line.push_str(text);
3190 }
3191 }
3192 }
3193
3194 /// Iterates over every [`SyntaxLayer`] in the buffer.
3195 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3196 self.syntax
3197 .layers_for_range(0..self.len(), &self.text, true)
3198 }
3199
3200 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3201 let offset = position.to_offset(self);
3202 self.syntax
3203 .layers_for_range(offset..offset, &self.text, false)
3204 .filter(|l| l.node().end_byte() > offset)
3205 .last()
3206 }
3207
3208 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3209 &self,
3210 range: Range<D>,
3211 ) -> Option<SyntaxLayer> {
3212 let range = range.to_offset(self);
3213 return self
3214 .syntax
3215 .layers_for_range(range, &self.text, false)
3216 .max_by(|a, b| {
3217 if a.depth != b.depth {
3218 a.depth.cmp(&b.depth)
3219 } else if a.offset.0 != b.offset.0 {
3220 a.offset.0.cmp(&b.offset.0)
3221 } else {
3222 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3223 }
3224 });
3225 }
3226
3227 /// Returns the main [`Language`].
3228 pub fn language(&self) -> Option<&Arc<Language>> {
3229 self.language.as_ref()
3230 }
3231
3232 /// Returns the [`Language`] at the given location.
3233 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3234 self.syntax_layer_at(position)
3235 .map(|info| info.language)
3236 .or(self.language.as_ref())
3237 }
3238
3239 /// Returns the settings for the language at the given location.
3240 pub fn settings_at<'a, D: ToOffset>(
3241 &'a self,
3242 position: D,
3243 cx: &'a App,
3244 ) -> Cow<'a, LanguageSettings> {
3245 language_settings(
3246 self.language_at(position).map(|l| l.name()),
3247 self.file.as_ref(),
3248 cx,
3249 )
3250 }
3251
3252 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3253 CharClassifier::new(self.language_scope_at(point))
3254 }
3255
3256 /// Returns the [`LanguageScope`] at the given location.
3257 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3258 let offset = position.to_offset(self);
3259 let mut scope = None;
3260 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3261
3262 // Use the layer that has the smallest node intersecting the given point.
3263 for layer in self
3264 .syntax
3265 .layers_for_range(offset..offset, &self.text, false)
3266 {
3267 let mut cursor = layer.node().walk();
3268
3269 let mut range = None;
3270 loop {
3271 let child_range = cursor.node().byte_range();
3272 if !child_range.contains(&offset) {
3273 break;
3274 }
3275
3276 range = Some(child_range);
3277 if cursor.goto_first_child_for_byte(offset).is_none() {
3278 break;
3279 }
3280 }
3281
3282 if let Some(range) = range {
3283 if smallest_range_and_depth.as_ref().map_or(
3284 true,
3285 |(smallest_range, smallest_range_depth)| {
3286 if layer.depth > *smallest_range_depth {
3287 true
3288 } else if layer.depth == *smallest_range_depth {
3289 range.len() < smallest_range.len()
3290 } else {
3291 false
3292 }
3293 },
3294 ) {
3295 smallest_range_and_depth = Some((range, layer.depth));
3296 scope = Some(LanguageScope {
3297 language: layer.language.clone(),
3298 override_id: layer.override_id(offset, &self.text),
3299 });
3300 }
3301 }
3302 }
3303
3304 scope.or_else(|| {
3305 self.language.clone().map(|language| LanguageScope {
3306 language,
3307 override_id: None,
3308 })
3309 })
3310 }
3311
3312 /// Returns a tuple of the range and character kind of the word
3313 /// surrounding the given position.
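    ///
    /// Illustration (not from the test suite): in a buffer containing `let foo = 1;`, an
    /// offset inside `foo` yields that word's byte range:
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(5);
    /// assert_eq!(range, 4..7); // the byte range of "foo"
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```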
3314 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3315 let mut start = start.to_offset(self);
3316 let mut end = start;
3317 let mut next_chars = self.chars_at(start).take(128).peekable();
3318 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3319
3320 let classifier = self.char_classifier_at(start);
3321 let word_kind = cmp::max(
3322 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3323 next_chars.peek().copied().map(|c| classifier.kind(c)),
3324 );
3325
3326 for ch in prev_chars {
3327 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3328 start -= ch.len_utf8();
3329 } else {
3330 break;
3331 }
3332 }
3333
3334 for ch in next_chars {
3335 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3336 end += ch.len_utf8();
3337 } else {
3338 break;
3339 }
3340 }
3341
3342 (start..end, word_kind)
3343 }
3344
3345 /// Returns the closest syntax node enclosing the given range.
3346 pub fn syntax_ancestor<'a, T: ToOffset>(
3347 &'a self,
3348 range: Range<T>,
3349 ) -> Option<tree_sitter::Node<'a>> {
3350 let range = range.start.to_offset(self)..range.end.to_offset(self);
3351 let mut result: Option<tree_sitter::Node<'a>> = None;
3352 'outer: for layer in self
3353 .syntax
3354 .layers_for_range(range.clone(), &self.text, true)
3355 {
3356 let mut cursor = layer.node().walk();
3357
3358 // Descend to the first leaf that touches the start of the range.
3359 //
3360 // If the range is non-empty and the current node ends exactly at the start,
3361 // move to the next sibling to find a node that extends beyond the start.
3362 //
3363 // If the range is empty and the current node starts after the range position,
3364 // move to the previous sibling to find the node that contains the position.
3365 while cursor.goto_first_child_for_byte(range.start).is_some() {
3366 if !range.is_empty() && cursor.node().end_byte() == range.start {
3367 cursor.goto_next_sibling();
3368 }
3369 if range.is_empty() && cursor.node().start_byte() > range.start {
3370 cursor.goto_previous_sibling();
3371 }
3372 }
3373
3374 // Ascend to the smallest ancestor that strictly contains the range.
3375 loop {
3376 let node_range = cursor.node().byte_range();
3377 if node_range.start <= range.start
3378 && node_range.end >= range.end
3379 && node_range.len() > range.len()
3380 {
3381 break;
3382 }
3383 if !cursor.goto_parent() {
3384 continue 'outer;
3385 }
3386 }
3387
3388 let left_node = cursor.node();
3389 let mut layer_result = left_node;
3390
3391 // For an empty range, try to find another node immediately to the right of the range.
3392 if left_node.end_byte() == range.start {
3393 let mut right_node = None;
3394 while !cursor.goto_next_sibling() {
3395 if !cursor.goto_parent() {
3396 break;
3397 }
3398 }
3399
3400 while cursor.node().start_byte() == range.start {
3401 right_node = Some(cursor.node());
3402 if !cursor.goto_first_child() {
3403 break;
3404 }
3405 }
3406
3407 // If there is a candidate node on both sides of the (empty) range, then
3408 // decide between the two by favoring a named node over an anonymous token.
3409 // If both nodes are the same in that regard, favor the right one.
3410 if let Some(right_node) = right_node {
3411 if right_node.is_named() || !left_node.is_named() {
3412 layer_result = right_node;
3413 }
3414 }
3415 }
3416
3417 if let Some(previous_result) = &result {
3418 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3419 continue;
3420 }
3421 }
3422 result = Some(layer_result);
3423 }
3424
3425 result
3426 }
3427
    /// Returns the root syntax node within the given row.
3429 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3430 let start_offset = position.to_offset(self);
3431
3432 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3433
3434 let layer = self
3435 .syntax
3436 .layers_for_range(start_offset..start_offset, &self.text, true)
3437 .next()?;
3438
3439 let mut cursor = layer.node().walk();
3440
3441 // Descend to the first leaf that touches the start of the range.
3442 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3443 if cursor.node().end_byte() == start_offset {
3444 cursor.goto_next_sibling();
3445 }
3446 }
3447
3448 // Ascend to the root node within the same row.
3449 while cursor.goto_parent() {
3450 if cursor.node().start_position().row != row {
3451 break;
3452 }
3453 }
3454
3455 return Some(cursor.node());
3456 }
3457
3458 /// Returns the outline for the buffer.
3459 ///
3460 /// This method allows passing an optional [`SyntaxTheme`] to
3461 /// syntax-highlight the returned symbols.
3462 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3463 self.outline_items_containing(0..self.len(), true, theme)
3464 .map(Outline::new)
3465 }
3466
3467 /// Returns all the symbols that contain the given position.
3468 ///
3469 /// This method allows passing an optional [`SyntaxTheme`] to
3470 /// syntax-highlight the returned symbols.
3471 pub fn symbols_containing<T: ToOffset>(
3472 &self,
3473 position: T,
3474 theme: Option<&SyntaxTheme>,
3475 ) -> Option<Vec<OutlineItem<Anchor>>> {
3476 let position = position.to_offset(self);
3477 let mut items = self.outline_items_containing(
3478 position.saturating_sub(1)..self.len().min(position + 1),
3479 false,
3480 theme,
3481 )?;
3482 let mut prev_depth = None;
3483 items.retain(|item| {
3484 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3485 prev_depth = Some(item.depth);
3486 result
3487 });
3488 Some(items)
3489 }
3490
3491 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3492 let range = range.to_offset(self);
3493 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3494 grammar.outline_config.as_ref().map(|c| &c.query)
3495 });
3496 let configs = matches
3497 .grammars()
3498 .iter()
3499 .map(|g| g.outline_config.as_ref().unwrap())
3500 .collect::<Vec<_>>();
3501
3502 while let Some(mat) = matches.peek() {
3503 let config = &configs[mat.grammar_index];
3504 let containing_item_node = maybe!({
3505 let item_node = mat.captures.iter().find_map(|cap| {
3506 if cap.index == config.item_capture_ix {
3507 Some(cap.node)
3508 } else {
3509 None
3510 }
3511 })?;
3512
3513 let item_byte_range = item_node.byte_range();
3514 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3515 None
3516 } else {
3517 Some(item_node)
3518 }
3519 });
3520
3521 if let Some(item_node) = containing_item_node {
3522 return Some(
3523 Point::from_ts_point(item_node.start_position())
3524 ..Point::from_ts_point(item_node.end_position()),
3525 );
3526 }
3527
3528 matches.advance();
3529 }
3530 None
3531 }
3532
3533 pub fn outline_items_containing<T: ToOffset>(
3534 &self,
3535 range: Range<T>,
3536 include_extra_context: bool,
3537 theme: Option<&SyntaxTheme>,
3538 ) -> Option<Vec<OutlineItem<Anchor>>> {
3539 let range = range.to_offset(self);
3540 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3541 grammar.outline_config.as_ref().map(|c| &c.query)
3542 });
3543 let configs = matches
3544 .grammars()
3545 .iter()
3546 .map(|g| g.outline_config.as_ref().unwrap())
3547 .collect::<Vec<_>>();
3548
3549 let mut items = Vec::new();
3550 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3551 while let Some(mat) = matches.peek() {
3552 let config = &configs[mat.grammar_index];
3553 if let Some(item) =
3554 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3555 {
3556 items.push(item);
3557 } else if let Some(capture) = mat
3558 .captures
3559 .iter()
3560 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3561 {
3562 let capture_range = capture.node.start_position()..capture.node.end_position();
3563 let mut capture_row_range =
3564 capture_range.start.row as u32..capture_range.end.row as u32;
3565 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3566 {
3567 capture_row_range.end -= 1;
3568 }
3569 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3570 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3571 last_row_range.end = capture_row_range.end;
3572 } else {
3573 annotation_row_ranges.push(capture_row_range);
3574 }
3575 } else {
3576 annotation_row_ranges.push(capture_row_range);
3577 }
3578 }
3579 matches.advance();
3580 }
3581
3582 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3583
3584 // Assign depths based on containment relationships and convert to anchors.
3585 let mut item_ends_stack = Vec::<Point>::new();
3586 let mut anchor_items = Vec::new();
3587 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3588 for item in items {
3589 while let Some(last_end) = item_ends_stack.last().copied() {
3590 if last_end < item.range.end {
3591 item_ends_stack.pop();
3592 } else {
3593 break;
3594 }
3595 }
3596
3597 let mut annotation_row_range = None;
3598 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3599 let row_preceding_item = item.range.start.row.saturating_sub(1);
3600 if next_annotation_row_range.end < row_preceding_item {
3601 annotation_row_ranges.next();
3602 } else {
3603 if next_annotation_row_range.end == row_preceding_item {
3604 annotation_row_range = Some(next_annotation_row_range.clone());
3605 annotation_row_ranges.next();
3606 }
3607 break;
3608 }
3609 }
3610
3611 anchor_items.push(OutlineItem {
3612 depth: item_ends_stack.len(),
3613 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3614 text: item.text,
3615 highlight_ranges: item.highlight_ranges,
3616 name_ranges: item.name_ranges,
3617 body_range: item.body_range.map(|body_range| {
3618 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3619 }),
3620 annotation_range: annotation_row_range.map(|annotation_range| {
3621 self.anchor_after(Point::new(annotation_range.start, 0))
3622 ..self.anchor_before(Point::new(
3623 annotation_range.end,
3624 self.line_len(annotation_range.end),
3625 ))
3626 }),
3627 });
3628 item_ends_stack.push(item.range.end);
3629 }
3630
3631 Some(anchor_items)
3632 }
3633
3634 fn next_outline_item(
3635 &self,
3636 config: &OutlineConfig,
3637 mat: &SyntaxMapMatch,
3638 range: &Range<usize>,
3639 include_extra_context: bool,
3640 theme: Option<&SyntaxTheme>,
3641 ) -> Option<OutlineItem<Point>> {
3642 let item_node = mat.captures.iter().find_map(|cap| {
3643 if cap.index == config.item_capture_ix {
3644 Some(cap.node)
3645 } else {
3646 None
3647 }
3648 })?;
3649
3650 let item_byte_range = item_node.byte_range();
3651 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3652 return None;
3653 }
3654 let item_point_range = Point::from_ts_point(item_node.start_position())
3655 ..Point::from_ts_point(item_node.end_position());
3656
3657 let mut open_point = None;
3658 let mut close_point = None;
3659 let mut buffer_ranges = Vec::new();
3660 for capture in mat.captures {
3661 let node_is_name;
3662 if capture.index == config.name_capture_ix {
3663 node_is_name = true;
3664 } else if Some(capture.index) == config.context_capture_ix
3665 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3666 {
3667 node_is_name = false;
3668 } else {
3669 if Some(capture.index) == config.open_capture_ix {
3670 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3671 } else if Some(capture.index) == config.close_capture_ix {
3672 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3673 }
3674
3675 continue;
3676 }
3677
3678 let mut range = capture.node.start_byte()..capture.node.end_byte();
3679 let start = capture.node.start_position();
3680 if capture.node.end_position().row > start.row {
3681 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3682 }
3683
3684 if !range.is_empty() {
3685 buffer_ranges.push((range, node_is_name));
3686 }
3687 }
3688 if buffer_ranges.is_empty() {
3689 return None;
3690 }
3691 let mut text = String::new();
3692 let mut highlight_ranges = Vec::new();
3693 let mut name_ranges = Vec::new();
3694 let mut chunks = self.chunks(
3695 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3696 true,
3697 );
3698 let mut last_buffer_range_end = 0;
3699
3700 for (buffer_range, is_name) in buffer_ranges {
3701 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3702 if space_added {
3703 text.push(' ');
3704 }
3705 let before_append_len = text.len();
3706 let mut offset = buffer_range.start;
3707 chunks.seek(buffer_range.clone());
3708 for mut chunk in chunks.by_ref() {
3709 if chunk.text.len() > buffer_range.end - offset {
3710 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3711 offset = buffer_range.end;
3712 } else {
3713 offset += chunk.text.len();
3714 }
3715 let style = chunk
3716 .syntax_highlight_id
3717 .zip(theme)
3718 .and_then(|(highlight, theme)| highlight.style(theme));
3719 if let Some(style) = style {
3720 let start = text.len();
3721 let end = start + chunk.text.len();
3722 highlight_ranges.push((start..end, style));
3723 }
3724 text.push_str(chunk.text);
3725 if offset >= buffer_range.end {
3726 break;
3727 }
3728 }
3729 if is_name {
3730 let after_append_len = text.len();
3731 let start = if space_added && !name_ranges.is_empty() {
3732 before_append_len - 1
3733 } else {
3734 before_append_len
3735 };
3736 name_ranges.push(start..after_append_len);
3737 }
3738 last_buffer_range_end = buffer_range.end;
3739 }
3740
3741 Some(OutlineItem {
3742 depth: 0, // We'll calculate the depth later
3743 range: item_point_range,
3744 text,
3745 highlight_ranges,
3746 name_ranges,
3747 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3748 annotation_range: None,
3749 })
3750 }
3751
3752 pub fn function_body_fold_ranges<T: ToOffset>(
3753 &self,
3754 within: Range<T>,
3755 ) -> impl Iterator<Item = Range<usize>> + '_ {
3756 self.text_object_ranges(within, TreeSitterOptions::default())
3757 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3758 }
3759
3760 /// For each grammar in the language, runs the provided
3761 /// [`tree_sitter::Query`] against the given range.
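///
/// A sketch (assuming `snapshot: &BufferSnapshot`) that runs each grammar's
/// outline query over the start of the buffer, mirroring how the outline
/// methods below consume the matches:
///
/// ```ignore
/// let mut matches = snapshot.matches(0..snapshot.len().min(1024), |grammar| {
///     grammar.outline_config.as_ref().map(|config| &config.query)
/// });
/// while let Some(mat) = matches.peek() {
///     println!("grammar {} matched pattern {}", mat.grammar_index, mat.pattern_index);
///     matches.advance();
/// }
/// ```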
3762 pub fn matches(
3763 &self,
3764 range: Range<usize>,
3765 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3766 ) -> SyntaxMapMatches {
3767 self.syntax.matches(range, self, query)
3768 }
3769
3770 pub fn all_bracket_ranges(
3771 &self,
3772 range: Range<usize>,
3773 ) -> impl Iterator<Item = BracketMatch> + '_ {
3774 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3775 grammar.brackets_config.as_ref().map(|c| &c.query)
3776 });
3777 let configs = matches
3778 .grammars()
3779 .iter()
3780 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3781 .collect::<Vec<_>>();
3782
3783 iter::from_fn(move || {
3784 while let Some(mat) = matches.peek() {
3785 let mut open = None;
3786 let mut close = None;
3787 let config = &configs[mat.grammar_index];
3788 let pattern = &config.patterns[mat.pattern_index];
3789 for capture in mat.captures {
3790 if capture.index == config.open_capture_ix {
3791 open = Some(capture.node.byte_range());
3792 } else if capture.index == config.close_capture_ix {
3793 close = Some(capture.node.byte_range());
3794 }
3795 }
3796
3797 matches.advance();
3798
3799 let Some((open_range, close_range)) = open.zip(close) else {
3800 continue;
3801 };
3802
3803 let bracket_range = open_range.start..=close_range.end;
3804 if !bracket_range.overlaps(&range) {
3805 continue;
3806 }
3807
3808 return Some(BracketMatch {
3809 open_range,
3810 close_range,
3811 newline_only: pattern.newline_only,
3812 });
3813 }
3814 None
3815 })
3816 }
3817
3818 /// Returns bracket range pairs overlapping or adjacent to `range`.
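///
/// A sketch (assuming `snapshot: &BufferSnapshot`):
///
/// ```ignore
/// for pair in snapshot.bracket_ranges(0..snapshot.len()) {
///     println!("open {:?}, close {:?}", pair.open_range, pair.close_range);
/// }
/// ```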
3819 pub fn bracket_ranges<T: ToOffset>(
3820 &self,
3821 range: Range<T>,
3822 ) -> impl Iterator<Item = BracketMatch> + '_ {
3823 // Expand the search range by one byte on each side (clamped to the buffer) so that bracket pairs adjacent to `range` are also matched.
3824 let range = range.start.to_offset(self).saturating_sub(1)
3825 ..self.len().min(range.end.to_offset(self) + 1);
3826 self.all_bracket_ranges(range)
3827 .filter(|pair| !pair.newline_only)
3828 }
3829
3830 pub fn text_object_ranges<T: ToOffset>(
3831 &self,
3832 range: Range<T>,
3833 options: TreeSitterOptions,
3834 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3835 let range = range.start.to_offset(self).saturating_sub(1)
3836 ..self.len().min(range.end.to_offset(self) + 1);
3837
3838 let mut matches =
3839 self.syntax
3840 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3841 grammar.text_object_config.as_ref().map(|c| &c.query)
3842 });
3843
3844 let configs = matches
3845 .grammars()
3846 .iter()
3847 .map(|grammar| grammar.text_object_config.as_ref())
3848 .collect::<Vec<_>>();
3849
3850 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3851
3852 iter::from_fn(move || {
3853 loop {
3854 while let Some(capture) = captures.pop() {
3855 if capture.0.overlaps(&range) {
3856 return Some(capture);
3857 }
3858 }
3859
3860 let mat = matches.peek()?;
3861
3862 let Some(config) = configs[mat.grammar_index].as_ref() else {
3863 matches.advance();
3864 continue;
3865 };
3866
3867 for capture in mat.captures {
3868 let Some(ix) = config
3869 .text_objects_by_capture_ix
3870 .binary_search_by_key(&capture.index, |e| e.0)
3871 .ok()
3872 else {
3873 continue;
3874 };
3875 let text_object = config.text_objects_by_capture_ix[ix].1;
3876 let byte_range = capture.node.byte_range();
3877
3878 let mut found = false;
3879 for (range, existing) in captures.iter_mut() {
3880 if existing == &text_object {
3881 range.start = range.start.min(byte_range.start);
3882 range.end = range.end.max(byte_range.end);
3883 found = true;
3884 break;
3885 }
3886 }
3887
3888 if !found {
3889 captures.push((byte_range, text_object));
3890 }
3891 }
3892
3893 matches.advance();
3894 }
3895 })
3896 }
3897
3898 /// Returns the bracket ranges that enclose the given range.
3899 pub fn enclosing_bracket_ranges<T: ToOffset>(
3900 &self,
3901 range: Range<T>,
3902 ) -> impl Iterator<Item = BracketMatch> + '_ {
3903 let range = range.start.to_offset(self)..range.end.to_offset(self);
3904
3905 self.bracket_ranges(range.clone()).filter(move |pair| {
3906 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3907 })
3908 }
3909
3910 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain it.
3911 ///
3912 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
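///
/// A sketch (assuming `snapshot: &BufferSnapshot` and a byte range
/// `selection` inside the buffer); the filter receives the open and close
/// bracket ranges:
///
/// ```ignore
/// let keep_all = |_open: Range<usize>, _close: Range<usize>| true;
/// if let Some((open, close)) =
///     snapshot.innermost_enclosing_bracket_ranges(selection, Some(&keep_all))
/// {
///     println!("enclosed by {open:?} and {close:?}");
/// }
/// ```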
3913 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3914 &self,
3915 range: Range<T>,
3916 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3917 ) -> Option<(Range<usize>, Range<usize>)> {
3918 let range = range.start.to_offset(self)..range.end.to_offset(self);
3919
3920 // Get the ranges of the innermost pair of brackets.
3921 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3922
3923 for pair in self.enclosing_bracket_ranges(range.clone()) {
3924 if let Some(range_filter) = range_filter {
3925 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3926 continue;
3927 }
3928 }
3929
3930 let len = pair.close_range.end - pair.open_range.start;
3931
3932 if let Some((existing_open, existing_close)) = &result {
3933 let existing_len = existing_close.end - existing_open.start;
3934 if len > existing_len {
3935 continue;
3936 }
3937 }
3938
3939 result = Some((pair.open_range, pair.close_range));
3940 }
3941
3942 result
3943 }
3944
3945 /// Returns byte-offset ranges for any matches of the redaction query.
3946 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3947 /// will be run on the relevant section of the buffer.
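///
/// A sketch of masking redacted spans for display (assuming
/// `snapshot: &BufferSnapshot` and a language with a redactions query):
///
/// ```ignore
/// let mut ranges = snapshot.redacted_ranges(0..snapshot.len()).collect::<Vec<_>>();
/// ranges.sort_by_key(|range| range.start);
/// let mut shown = snapshot.text();
/// // Replace from the end so earlier offsets stay valid.
/// for range in ranges.into_iter().rev() {
///     shown.replace_range(range, "[REDACTED]");
/// }
/// ```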
3948 pub fn redacted_ranges<T: ToOffset>(
3949 &self,
3950 range: Range<T>,
3951 ) -> impl Iterator<Item = Range<usize>> + '_ {
3952 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3953 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3954 grammar
3955 .redactions_config
3956 .as_ref()
3957 .map(|config| &config.query)
3958 });
3959
3960 let configs = syntax_matches
3961 .grammars()
3962 .iter()
3963 .map(|grammar| grammar.redactions_config.as_ref())
3964 .collect::<Vec<_>>();
3965
3966 iter::from_fn(move || {
3967 let redacted_range = syntax_matches
3968 .peek()
3969 .and_then(|mat| {
3970 configs[mat.grammar_index].and_then(|config| {
3971 mat.captures
3972 .iter()
3973 .find(|capture| capture.index == config.redaction_capture_ix)
3974 })
3975 })
3976 .map(|mat| mat.node.byte_range());
3977 syntax_matches.advance();
3978 redacted_range
3979 })
3980 }
3981
3982 pub fn injections_intersecting_range<T: ToOffset>(
3983 &self,
3984 range: Range<T>,
3985 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3986 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3987
3988 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3989 grammar
3990 .injection_config
3991 .as_ref()
3992 .map(|config| &config.query)
3993 });
3994
3995 let configs = syntax_matches
3996 .grammars()
3997 .iter()
3998 .map(|grammar| grammar.injection_config.as_ref())
3999 .collect::<Vec<_>>();
4000
4001 iter::from_fn(move || {
4002 let ranges = syntax_matches.peek().and_then(|mat| {
4003 let config = &configs[mat.grammar_index]?;
4004 let content_capture_range = mat.captures.iter().find_map(|capture| {
4005 if capture.index == config.content_capture_ix {
4006 Some(capture.node.byte_range())
4007 } else {
4008 None
4009 }
4010 })?;
4011 let language = self.language_at(content_capture_range.start)?;
4012 Some((content_capture_range, language))
4013 });
4014 syntax_matches.advance();
4015 ranges
4016 })
4017 }
4018
4019 pub fn runnable_ranges(
4020 &self,
4021 offset_range: Range<usize>,
4022 ) -> impl Iterator<Item = RunnableRange> + '_ {
4023 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4024 grammar.runnable_config.as_ref().map(|config| &config.query)
4025 });
4026
4027 let test_configs = syntax_matches
4028 .grammars()
4029 .iter()
4030 .map(|grammar| grammar.runnable_config.as_ref())
4031 .collect::<Vec<_>>();
4032
4033 iter::from_fn(move || {
4034 loop {
4035 let mat = syntax_matches.peek()?;
4036
4037 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4038 let mut run_range = None;
4039 let full_range = mat.captures.iter().fold(
4040 Range {
4041 start: usize::MAX,
4042 end: 0,
4043 },
4044 |mut acc, next| {
4045 let byte_range = next.node.byte_range();
4046 if acc.start > byte_range.start {
4047 acc.start = byte_range.start;
4048 }
4049 if acc.end < byte_range.end {
4050 acc.end = byte_range.end;
4051 }
4052 acc
4053 },
4054 );
4055 if full_range.start > full_range.end {
4056 // We did not find a full spanning range of this match.
4057 return None;
4058 }
4059 let extra_captures: SmallVec<[_; 1]> =
4060 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4061 test_configs
4062 .extra_captures
4063 .get(capture.index as usize)
4064 .cloned()
4065 .and_then(|tag_name| match tag_name {
4066 RunnableCapture::Named(name) => {
4067 Some((capture.node.byte_range(), name))
4068 }
4069 RunnableCapture::Run => {
4070 let _ = run_range.insert(capture.node.byte_range());
4071 None
4072 }
4073 })
4074 }));
4075 let run_range = run_range?;
4076 let tags = test_configs
4077 .query
4078 .property_settings(mat.pattern_index)
4079 .iter()
4080 .filter_map(|property| {
4081 if *property.key == *"tag" {
4082 property
4083 .value
4084 .as_ref()
4085 .map(|value| RunnableTag(value.to_string().into()))
4086 } else {
4087 None
4088 }
4089 })
4090 .collect();
4091 let extra_captures = extra_captures
4092 .into_iter()
4093 .map(|(range, name)| {
4094 (
4095 name.to_string(),
4096 self.text_for_range(range.clone()).collect::<String>(),
4097 )
4098 })
4099 .collect();
4100 // All tags should have the same range.
4101 Some(RunnableRange {
4102 run_range,
4103 full_range,
4104 runnable: Runnable {
4105 tags,
4106 language: mat.language,
4107 buffer: self.remote_id(),
4108 },
4109 extra_captures,
4110 buffer_id: self.remote_id(),
4111 })
4112 });
4113
4114 syntax_matches.advance();
4115 if test_range.is_some() {
4116 // It's fine to short-circuit when `.peek()?` returns None. But when a match simply
4117 // lacked a run marker, we don't want to end the iterator; we loop around to the next match instead.
4118 return test_range;
4119 }
4120 }
4121 })
4122 }
4123
4124 /// Returns the selections of all replicas that intersect the given range, optionally including the local replica.
4125 #[allow(clippy::type_complexity)]
4126 pub fn selections_in_range(
4127 &self,
4128 range: Range<Anchor>,
4129 include_local: bool,
4130 ) -> impl Iterator<
4131 Item = (
4132 ReplicaId,
4133 bool,
4134 CursorShape,
4135 impl Iterator<Item = &Selection<Anchor>> + '_,
4136 ),
4137 > + '_ {
4138 self.remote_selections
4139 .iter()
4140 .filter(move |(replica_id, set)| {
4141 (include_local || **replica_id != self.text.replica_id())
4142 && !set.selections.is_empty()
4143 })
4144 .map(move |(replica_id, set)| {
4145 let start_ix = match set.selections.binary_search_by(|probe| {
4146 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4147 }) {
4148 Ok(ix) | Err(ix) => ix,
4149 };
4150 let end_ix = match set.selections.binary_search_by(|probe| {
4151 probe.start.cmp(&range.end, self).then(Ordering::Less)
4152 }) {
4153 Ok(ix) | Err(ix) => ix,
4154 };
4155
4156 (
4157 *replica_id,
4158 set.line_mode,
4159 set.cursor_shape,
4160 set.selections[start_ix..end_ix].iter(),
4161 )
4162 })
4163 }
4164
4165 /// Returns whether the buffer contains any diagnostics.
4166 pub fn has_diagnostics(&self) -> bool {
4167 !self.diagnostics.is_empty()
4168 }
4169
4170 /// Returns all the diagnostics intersecting the given range.
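///
/// A sketch (assuming `snapshot: &BufferSnapshot`) that lists error messages
/// for the whole buffer, resolving anchors to `Point` coordinates:
///
/// ```ignore
/// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
///     if entry.diagnostic.severity == DiagnosticSeverity::ERROR {
///         println!("{:?}: {}", entry.range.start, entry.diagnostic.message);
///     }
/// }
/// ```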
4171 pub fn diagnostics_in_range<'a, T, O>(
4172 &'a self,
4173 search_range: Range<T>,
4174 reversed: bool,
4175 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4176 where
4177 T: 'a + Clone + ToOffset,
4178 O: 'a + FromAnchor,
4179 {
4180 let mut iterators: Vec<_> = self
4181 .diagnostics
4182 .iter()
4183 .map(|(_, collection)| {
4184 collection
4185 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4186 .peekable()
4187 })
4188 .collect();
4189
4190 std::iter::from_fn(move || {
4191 let (next_ix, _) = iterators
4192 .iter_mut()
4193 .enumerate()
4194 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4195 .min_by(|(_, a), (_, b)| {
4196 let cmp = a
4197 .range
4198 .start
4199 .cmp(&b.range.start, self)
4200 // when range is equal, sort by diagnostic severity
4201 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4202 // and stabilize order with group_id
4203 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4204 if reversed { cmp.reverse() } else { cmp }
4205 })?;
4206 iterators[next_ix]
4207 .next()
4208 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4209 diagnostic,
4210 range: FromAnchor::from_anchor(&range.start, self)
4211 ..FromAnchor::from_anchor(&range.end, self),
4212 })
4213 })
4214 }
4215
4216 /// Returns all the diagnostic groups associated with the given
4217 /// language server ID. If no language server ID is provided,
4218 /// all diagnostic groups are returned.
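///
/// A sketch (assuming `snapshot: &BufferSnapshot`) that prints the primary
/// entry of every group across all language servers:
///
/// ```ignore
/// for (server_id, group) in snapshot.diagnostic_groups(None) {
///     let primary = &group.entries[group.primary_ix];
///     println!("{server_id:?}: {}", primary.diagnostic.message);
/// }
/// ```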
4219 pub fn diagnostic_groups(
4220 &self,
4221 language_server_id: Option<LanguageServerId>,
4222 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4223 let mut groups = Vec::new();
4224
4225 if let Some(language_server_id) = language_server_id {
4226 if let Ok(ix) = self
4227 .diagnostics
4228 .binary_search_by_key(&language_server_id, |e| e.0)
4229 {
4230 self.diagnostics[ix]
4231 .1
4232 .groups(language_server_id, &mut groups, self);
4233 }
4234 } else {
4235 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4236 diagnostics.groups(*language_server_id, &mut groups, self);
4237 }
4238 }
4239
4240 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4241 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4242 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4243 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4244 });
4245
4246 groups
4247 }
4248
4249 /// Returns an iterator over the diagnostics for the given group.
4250 pub fn diagnostic_group<O>(
4251 &self,
4252 group_id: usize,
4253 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4254 where
4255 O: FromAnchor + 'static,
4256 {
4257 self.diagnostics
4258 .iter()
4259 .flat_map(move |(_, set)| set.group(group_id, self))
4260 }
4261
4262 /// An integer version number that accounts for all updates besides
4263 /// the buffer's text itself (which is versioned via a version vector).
4264 pub fn non_text_state_update_count(&self) -> usize {
4265 self.non_text_state_update_count
4266 }
4267
4268 /// Returns the buffer's underlying file, if any.
4269 pub fn file(&self) -> Option<&Arc<dyn File>> {
4270 self.file.as_ref()
4271 }
4272
4273 /// Resolves the file path (relative to the worktree root) associated with the underlying file; when `include_root` is true, the worktree root name is included.
4274 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4275 if let Some(file) = self.file() {
4276 if file.path().file_name().is_none() || include_root {
4277 Some(file.full_path(cx))
4278 } else {
4279 Some(file.path().to_path_buf())
4280 }
4281 } else {
4282 None
4283 }
4284 }
4285
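/// Collects the distinct words in the given range, keyed by their text and
/// mapped to their anchor ranges.
///
/// A sketch (assuming `snapshot: &BufferSnapshot`) that gathers words fuzzily
/// matching "foo" while skipping digit-initial tokens:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("foo"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// for word in words.keys() {
///     println!("{word}");
/// }
/// ```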
4286 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4287 let query_str = query.fuzzy_contents;
4288 if query_str.map_or(false, |query| query.is_empty()) {
4289 return BTreeMap::default();
4290 }
4291
4292 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4293 language,
4294 override_id: None,
4295 }));
4296
4297 let mut query_ix = 0;
4298 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4299 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4300
4301 let mut words = BTreeMap::default();
4302 let mut current_word_start_ix = None;
4303 let mut chunk_ix = query.range.start;
4304 for chunk in self.chunks(query.range, false) {
4305 for (i, c) in chunk.text.char_indices() {
4306 let ix = chunk_ix + i;
4307 if classifier.is_word(c) {
4308 if current_word_start_ix.is_none() {
4309 current_word_start_ix = Some(ix);
4310 }
4311
4312 if let Some(query_chars) = &query_chars {
4313 if query_ix < query_len {
4314 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4315 query_ix += 1;
4316 }
4317 }
4318 }
4319 continue;
4320 } else if let Some(word_start) = current_word_start_ix.take() {
4321 if query_ix == query_len {
4322 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4323 let mut word_text = self.text_for_range(word_start..ix).peekable();
4324 let first_char = word_text
4325 .peek()
4326 .and_then(|first_chunk| first_chunk.chars().next());
4327 // As a heuristic to reduce useless completions, skip "words" that start with a digit.
4328 if !query.skip_digits
4329 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4330 {
4331 words.insert(word_text.collect(), word_range);
4332 }
4333 }
4334 }
4335 query_ix = 0;
4336 }
4337 chunk_ix += chunk.text.len();
4338 }
4339
4340 words
4341 }
4342}
4343
4344pub struct WordsQuery<'a> {
4345 /// Only returns words with all chars from the fuzzy string in them.
4346 pub fuzzy_contents: Option<&'a str>,
4347 /// Skips words that start with a digit.
4348 pub skip_digits: bool,
4349 /// Buffer offset range, to look for words.
4350 pub range: Range<usize>,
4351}
4352
4353fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4354 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4355}
4356
4357fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4358 let mut result = IndentSize::spaces(0);
4359 for c in text {
4360 let kind = match c {
4361 ' ' => IndentKind::Space,
4362 '\t' => IndentKind::Tab,
4363 _ => break,
4364 };
4365 if result.len == 0 {
4366 result.kind = kind;
4367 }
4368 result.len += 1;
4369 }
4370 result
4371}
4372
4373impl Clone for BufferSnapshot {
4374 fn clone(&self) -> Self {
4375 Self {
4376 text: self.text.clone(),
4377 syntax: self.syntax.clone(),
4378 file: self.file.clone(),
4379 remote_selections: self.remote_selections.clone(),
4380 diagnostics: self.diagnostics.clone(),
4381 language: self.language.clone(),
4382 non_text_state_update_count: self.non_text_state_update_count,
4383 }
4384 }
4385}
4386
4387impl Deref for BufferSnapshot {
4388 type Target = text::BufferSnapshot;
4389
4390 fn deref(&self) -> &Self::Target {
4391 &self.text
4392 }
4393}
4394
4395unsafe impl Send for BufferChunks<'_> {}
4396
4397impl<'a> BufferChunks<'a> {
4398 pub(crate) fn new(
4399 text: &'a Rope,
4400 range: Range<usize>,
4401 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4402 diagnostics: bool,
4403 buffer_snapshot: Option<&'a BufferSnapshot>,
4404 ) -> Self {
4405 let mut highlights = None;
4406 if let Some((captures, highlight_maps)) = syntax {
4407 highlights = Some(BufferChunkHighlights {
4408 captures,
4409 next_capture: None,
4410 stack: Default::default(),
4411 highlight_maps,
4412 })
4413 }
4414
4415 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4416 let chunks = text.chunks_in_range(range.clone());
4417
4418 let mut this = BufferChunks {
4419 range,
4420 buffer_snapshot,
4421 chunks,
4422 diagnostic_endpoints,
4423 error_depth: 0,
4424 warning_depth: 0,
4425 information_depth: 0,
4426 hint_depth: 0,
4427 unnecessary_depth: 0,
4428 underline: true,
4429 highlights,
4430 };
4431 this.initialize_diagnostic_endpoints();
4432 this
4433 }
4434
4435 /// Seeks to the given byte range in the buffer.
4436 pub fn seek(&mut self, range: Range<usize>) {
4437 let old_range = std::mem::replace(&mut self.range, range.clone());
4438 self.chunks.set_range(self.range.clone());
4439 if let Some(highlights) = self.highlights.as_mut() {
4440 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4441 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4442 highlights
4443 .stack
4444 .retain(|(end_offset, _)| *end_offset > range.start);
4445 if let Some(capture) = &highlights.next_capture {
4446 if range.start >= capture.node.start_byte() {
4447 let next_capture_end = capture.node.end_byte();
4448 if range.start < next_capture_end {
4449 highlights.stack.push((
4450 next_capture_end,
4451 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4452 ));
4453 }
4454 highlights.next_capture.take();
4455 }
4456 }
4457 } else if let Some(snapshot) = self.buffer_snapshot {
4458 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4459 *highlights = BufferChunkHighlights {
4460 captures,
4461 next_capture: None,
4462 stack: Default::default(),
4463 highlight_maps,
4464 };
4465 } else {
4466 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4467 // Seeking such BufferChunks is not supported.
4468 debug_assert!(
4469 false,
4470 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4471 );
4472 }
4473
4474 highlights.captures.set_byte_range(self.range.clone());
4475 self.initialize_diagnostic_endpoints();
4476 }
4477 }
4478
4479 fn initialize_diagnostic_endpoints(&mut self) {
4480 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4481 if let Some(buffer) = self.buffer_snapshot {
4482 let mut diagnostic_endpoints = Vec::new();
4483 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4484 diagnostic_endpoints.push(DiagnosticEndpoint {
4485 offset: entry.range.start,
4486 is_start: true,
4487 severity: entry.diagnostic.severity,
4488 is_unnecessary: entry.diagnostic.is_unnecessary,
4489 underline: entry.diagnostic.underline,
4490 });
4491 diagnostic_endpoints.push(DiagnosticEndpoint {
4492 offset: entry.range.end,
4493 is_start: false,
4494 severity: entry.diagnostic.severity,
4495 is_unnecessary: entry.diagnostic.is_unnecessary,
4496 underline: entry.diagnostic.underline,
4497 });
4498 }
4499 diagnostic_endpoints
4500 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4501 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4502 self.hint_depth = 0;
4503 self.error_depth = 0;
4504 self.warning_depth = 0;
4505 self.information_depth = 0;
4506 }
4507 }
4508 }
4509
4510 /// The current byte offset in the buffer.
4511 pub fn offset(&self) -> usize {
4512 self.range.start
4513 }
4514
4515 pub fn range(&self) -> Range<usize> {
4516 self.range.clone()
4517 }
4518
4519 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4520 let depth = match endpoint.severity {
4521 DiagnosticSeverity::ERROR => &mut self.error_depth,
4522 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4523 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4524 DiagnosticSeverity::HINT => &mut self.hint_depth,
4525 _ => return,
4526 };
4527 if endpoint.is_start {
4528 *depth += 1;
4529 } else {
4530 *depth -= 1;
4531 }
4532
4533 if endpoint.is_unnecessary {
4534 if endpoint.is_start {
4535 self.unnecessary_depth += 1;
4536 } else {
4537 self.unnecessary_depth -= 1;
4538 }
4539 }
4540 }
4541
4542 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4543 if self.error_depth > 0 {
4544 Some(DiagnosticSeverity::ERROR)
4545 } else if self.warning_depth > 0 {
4546 Some(DiagnosticSeverity::WARNING)
4547 } else if self.information_depth > 0 {
4548 Some(DiagnosticSeverity::INFORMATION)
4549 } else if self.hint_depth > 0 {
4550 Some(DiagnosticSeverity::HINT)
4551 } else {
4552 None
4553 }
4554 }
4555
4556 fn current_code_is_unnecessary(&self) -> bool {
4557 self.unnecessary_depth > 0
4558 }
4559}
4560
4561impl<'a> Iterator for BufferChunks<'a> {
4562 type Item = Chunk<'a>;
4563
4564 fn next(&mut self) -> Option<Self::Item> {
4565 let mut next_capture_start = usize::MAX;
4566 let mut next_diagnostic_endpoint = usize::MAX;
4567
4568 if let Some(highlights) = self.highlights.as_mut() {
4569 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4570 if *parent_capture_end <= self.range.start {
4571 highlights.stack.pop();
4572 } else {
4573 break;
4574 }
4575 }
4576
4577 if highlights.next_capture.is_none() {
4578 highlights.next_capture = highlights.captures.next();
4579 }
4580
4581 while let Some(capture) = highlights.next_capture.as_ref() {
4582 if self.range.start < capture.node.start_byte() {
4583 next_capture_start = capture.node.start_byte();
4584 break;
4585 } else {
4586 let highlight_id =
4587 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4588 highlights
4589 .stack
4590 .push((capture.node.end_byte(), highlight_id));
4591 highlights.next_capture = highlights.captures.next();
4592 }
4593 }
4594 }
4595
4596 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4597 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4598 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4599 if endpoint.offset <= self.range.start {
4600 self.update_diagnostic_depths(endpoint);
4601 diagnostic_endpoints.next();
4602 self.underline = endpoint.underline;
4603 } else {
4604 next_diagnostic_endpoint = endpoint.offset;
4605 break;
4606 }
4607 }
4608 }
4609 self.diagnostic_endpoints = diagnostic_endpoints;
4610
4611 if let Some(chunk) = self.chunks.peek() {
4612 let chunk_start = self.range.start;
4613 let mut chunk_end = (self.chunks.offset() + chunk.len())
4614 .min(next_capture_start)
4615 .min(next_diagnostic_endpoint);
4616 let mut highlight_id = None;
4617 if let Some(highlights) = self.highlights.as_ref() {
4618 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4619 chunk_end = chunk_end.min(*parent_capture_end);
4620 highlight_id = Some(*parent_highlight_id);
4621 }
4622 }
4623
4624 let slice =
4625 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4626 self.range.start = chunk_end;
4627 if self.range.start == self.chunks.offset() + chunk.len() {
4628 self.chunks.next().unwrap();
4629 }
4630
4631 Some(Chunk {
4632 text: slice,
4633 syntax_highlight_id: highlight_id,
4634 underline: self.underline,
4635 diagnostic_severity: self.current_diagnostic_severity(),
4636 is_unnecessary: self.current_code_is_unnecessary(),
4637 ..Chunk::default()
4638 })
4639 } else {
4640 None
4641 }
4642 }
4643}
4644
4645impl operation_queue::Operation for Operation {
4646 fn lamport_timestamp(&self) -> clock::Lamport {
4647 match self {
4648 Operation::Buffer(_) => {
4649 unreachable!("buffer operations should never be deferred at this layer")
4650 }
4651 Operation::UpdateDiagnostics {
4652 lamport_timestamp, ..
4653 }
4654 | Operation::UpdateSelections {
4655 lamport_timestamp, ..
4656 }
4657 | Operation::UpdateCompletionTriggers {
4658 lamport_timestamp, ..
4659 } => *lamport_timestamp,
4660 }
4661 }
4662}
4663
4664impl Default for Diagnostic {
4665 fn default() -> Self {
4666 Self {
4667 source: Default::default(),
4668 source_kind: DiagnosticSourceKind::Other,
4669 code: None,
4670 code_description: None,
4671 severity: DiagnosticSeverity::ERROR,
4672 message: Default::default(),
4673 markdown: None,
4674 group_id: 0,
4675 is_primary: false,
4676 is_disk_based: false,
4677 is_unnecessary: false,
4678 underline: true,
4679 data: None,
4680 }
4681 }
4682}
4683
4684impl IndentSize {
4685 /// Returns an [`IndentSize`] representing the given number of spaces.
4686 pub fn spaces(len: u32) -> Self {
4687 Self {
4688 len,
4689 kind: IndentKind::Space,
4690 }
4691 }
4692
4693 /// Returns an [`IndentSize`] representing a tab.
4694 pub fn tab() -> Self {
4695 Self {
4696 len: 1,
4697 kind: IndentKind::Tab,
4698 }
4699 }
4700
4701 /// An iterator over the characters represented by this [`IndentSize`].
4702 pub fn chars(&self) -> impl Iterator<Item = char> {
4703 iter::repeat(self.char()).take(self.len as usize)
4704 }
4705
4706 /// The character representation of this [`IndentSize`].
4707 pub fn char(&self) -> char {
4708 match self.kind {
4709 IndentKind::Space => ' ',
4710 IndentKind::Tab => '\t',
4711 }
4712 }
4713
4714 /// Consumes the current [`IndentSize`] and returns a new one that has
4715 /// been shrunk or enlarged by the given size in the given direction.
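///
/// A small sketch of the arithmetic, using the character count as the
/// observable result:
///
/// ```ignore
/// // Growing a four-space indent by four more spaces yields eight spaces.
/// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).chars().count(), 8);
/// // Shrinking a four-space indent by two spaces yields two spaces.
/// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).chars().count(), 2);
/// ```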
4716 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4717 match direction {
4718 Ordering::Less => {
4719 if self.kind == size.kind && self.len >= size.len {
4720 self.len -= size.len;
4721 }
4722 }
4723 Ordering::Equal => {}
4724 Ordering::Greater => {
4725 if self.len == 0 {
4726 self = size;
4727 } else if self.kind == size.kind {
4728 self.len += size.len;
4729 }
4730 }
4731 }
4732 self
4733 }
4734
4735 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4736 match self.kind {
4737 IndentKind::Space => self.len as usize,
4738 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4739 }
4740 }
4741}
4742
4743#[cfg(any(test, feature = "test-support"))]
4744pub struct TestFile {
4745 pub path: Arc<Path>,
4746 pub root_name: String,
4747 pub local_root: Option<PathBuf>,
4748}
4749
4750#[cfg(any(test, feature = "test-support"))]
4751impl File for TestFile {
4752 fn path(&self) -> &Arc<Path> {
4753 &self.path
4754 }
4755
4756 fn full_path(&self, _: &gpui::App) -> PathBuf {
4757 PathBuf::from(&self.root_name).join(self.path.as_ref())
4758 }
4759
4760 fn as_local(&self) -> Option<&dyn LocalFile> {
4761 if self.local_root.is_some() {
4762 Some(self)
4763 } else {
4764 None
4765 }
4766 }
4767
4768 fn disk_state(&self) -> DiskState {
4769 unimplemented!()
4770 }
4771
4772 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4773 self.path().file_name().unwrap_or(self.root_name.as_ref())
4774 }
4775
4776 fn worktree_id(&self, _: &App) -> WorktreeId {
4777 WorktreeId::from_usize(0)
4778 }
4779
4780 fn to_proto(&self, _: &App) -> rpc::proto::File {
4781 unimplemented!()
4782 }
4783
4784 fn is_private(&self) -> bool {
4785 false
4786 }
4787}
4788
4789#[cfg(any(test, feature = "test-support"))]
4790impl LocalFile for TestFile {
4791 fn abs_path(&self, _cx: &App) -> PathBuf {
4792 PathBuf::from(self.local_root.as_ref().unwrap())
4793 .join(&self.root_name)
4794 .join(self.path.as_ref())
4795 }
4796
4797 fn load(&self, _cx: &App) -> Task<Result<String>> {
4798 unimplemented!()
4799 }
4800
4801 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4802 unimplemented!()
4803 }
4804}
4805
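/// Groups an ascending sequence of values into contiguous ranges, capping each
/// range at `max_len` values.
///
/// A small sketch:
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```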
4806pub(crate) fn contiguous_ranges(
4807 values: impl Iterator<Item = u32>,
4808 max_len: usize,
4809) -> impl Iterator<Item = Range<u32>> {
4810 let mut values = values;
4811 let mut current_range: Option<Range<u32>> = None;
4812 std::iter::from_fn(move || {
4813 loop {
4814 if let Some(value) = values.next() {
4815 if let Some(range) = &mut current_range {
4816 if value == range.end && range.len() < max_len {
4817 range.end += 1;
4818 continue;
4819 }
4820 }
4821
4822 let prev_range = current_range.clone();
4823 current_range = Some(value..(value + 1));
4824 if prev_range.is_some() {
4825 return prev_range;
4826 }
4827 } else {
4828 return current_range.take();
4829 }
4830 }
4831 })
4832}
4833
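/// Classifies characters as word, whitespace, or punctuation characters,
/// taking the language scope's word characters into account.
///
/// A sketch of the builder-style configuration:
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_word('-')); // punctuation folds into words here
/// assert!(classifier.is_whitespace(' '));
/// ```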
4834#[derive(Default, Debug)]
4835pub struct CharClassifier {
4836 scope: Option<LanguageScope>,
4837 for_completion: bool,
4838 ignore_punctuation: bool,
4839}
4840
4841impl CharClassifier {
4842 pub fn new(scope: Option<LanguageScope>) -> Self {
4843 Self {
4844 scope,
4845 for_completion: false,
4846 ignore_punctuation: false,
4847 }
4848 }
4849
4850 pub fn for_completion(self, for_completion: bool) -> Self {
4851 Self {
4852 for_completion,
4853 ..self
4854 }
4855 }
4856
4857 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4858 Self {
4859 ignore_punctuation,
4860 ..self
4861 }
4862 }
4863
4864 pub fn is_whitespace(&self, c: char) -> bool {
4865 self.kind(c) == CharKind::Whitespace
4866 }
4867
4868 pub fn is_word(&self, c: char) -> bool {
4869 self.kind(c) == CharKind::Word
4870 }
4871
4872 pub fn is_punctuation(&self, c: char) -> bool {
4873 self.kind(c) == CharKind::Punctuation
4874 }
4875
4876 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4877 if c.is_alphanumeric() || c == '_' {
4878 return CharKind::Word;
4879 }
4880
4881 if let Some(scope) = &self.scope {
4882 let characters = if self.for_completion {
4883 scope.completion_query_characters()
4884 } else {
4885 scope.word_characters()
4886 };
4887 if let Some(characters) = characters {
4888 if characters.contains(&c) {
4889 return CharKind::Word;
4890 }
4891 }
4892 }
4893
4894 if c.is_whitespace() {
4895 return CharKind::Whitespace;
4896 }
4897
4898 if ignore_punctuation {
4899 CharKind::Word
4900 } else {
4901 CharKind::Punctuation
4902 }
4903 }
4904
4905 pub fn kind(&self, c: char) -> CharKind {
4906 self.kind_with(c, self.ignore_punctuation)
4907 }
4908}
4909
4910/// Find all of the ranges of whitespace that occur at the ends of lines
4911/// in the given rope.
4912///
4913/// This could also be done with a regex search, but this implementation
4914/// avoids copying text.
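///
/// A small sketch (assuming `Rope::from(&str)` is available in this context):
///
/// ```ignore
/// let rope = Rope::from("a  \nb\t\nc");
/// // Trailing spaces on the first line and the trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![1..3, 5..6]);
/// ```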
4915pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4916 let mut ranges = Vec::new();
4917
4918 let mut offset = 0;
4919 let mut prev_chunk_trailing_whitespace_range = 0..0;
4920 for chunk in rope.chunks() {
4921 let mut prev_line_trailing_whitespace_range = 0..0;
4922 for (i, line) in chunk.split('\n').enumerate() {
4923 let line_end_offset = offset + line.len();
4924 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4925 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4926
4927 if i == 0 && trimmed_line_len == 0 {
4928 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4929 }
4930 if !prev_line_trailing_whitespace_range.is_empty() {
4931 ranges.push(prev_line_trailing_whitespace_range);
4932 }
4933
4934 offset = line_end_offset + 1;
4935 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4936 }
4937
4938 offset -= 1;
4939 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4940 }
4941
4942 if !prev_chunk_trailing_whitespace_range.is_empty() {
4943 ranges.push(prev_chunk_trailing_whitespace_range);
4944 }
4945
4946 ranges
4947}