1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21pub use clock::ReplicaId;
22use clock::{AGENT_REPLICA_ID, Lamport};
23use collections::HashMap;
24use fs::MTime;
25use futures::channel::oneshot;
26use gpui::{
27 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
28 Task, TaskLabel, TextStyle,
29};
30use lsp::{LanguageServerId, NumberOrString};
31use parking_lot::Mutex;
32use schemars::JsonSchema;
33use serde::{Deserialize, Serialize};
34use serde_json::Value;
35use settings::WorktreeId;
36use smallvec::SmallVec;
37use smol::future::yield_now;
38use std::{
39 any::Any,
40 borrow::Cow,
41 cell::Cell,
42 cmp::{self, Ordering, Reverse},
43 collections::{BTreeMap, BTreeSet},
44 ffi::OsStr,
45 future::Future,
46 iter::{self, Iterator, Peekable},
47 mem,
48 num::NonZeroU32,
49 ops::{Deref, Range},
50 path::{Path, PathBuf},
51 rc,
52 sync::{Arc, LazyLock},
53 time::{Duration, Instant},
54 vec,
55};
56use sum_tree::TreeMap;
57use text::operation_queue::OperationQueue;
58use text::*;
59pub use text::{
60 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
61 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
62 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
63 ToPointUtf16, Transaction, TransactionId, Unclipped,
64};
65use theme::{ActiveTheme as _, SyntaxTheme};
66#[cfg(any(test, feature = "test-support"))]
67use util::RandomCharIter;
68use util::{RangeExt, debug_panic, maybe};
69
70#[cfg(any(test, feature = "test-support"))]
71pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
72
73pub use lsp::DiagnosticSeverity;
74
75/// A label for the background task spawned by the buffer to compute
76/// a diff against the contents of its file.
77pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
78
/// Indicates whether a [`Buffer`] can be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
84 /// The buffer is a read-only replica.
85 ReadOnly,
86}
87
88pub type BufferRow = u32;
89
90/// An in-memory representation of a source code file, including its text,
91/// syntax trees, git status, and diagnostics.
92pub struct Buffer {
93 text: TextBuffer,
94 branch_state: Option<BufferBranchState>,
95 /// Filesystem state, `None` when there is no path.
96 file: Option<Arc<dyn File>>,
97 /// The mtime of the file when this buffer was last loaded from
98 /// or saved to disk.
99 saved_mtime: Option<MTime>,
100 /// The version vector when this buffer was last loaded from
101 /// or saved to disk.
102 saved_version: clock::Global,
103 preview_version: clock::Global,
104 transaction_depth: usize,
105 was_dirty_before_starting_transaction: Option<bool>,
106 reload_task: Option<Task<Result<()>>>,
107 language: Option<Arc<Language>>,
108 autoindent_requests: Vec<Arc<AutoindentRequest>>,
109 pending_autoindent: Option<Task<()>>,
110 sync_parse_timeout: Duration,
111 syntax_map: Mutex<SyntaxMap>,
112 reparse: Option<Task<()>>,
113 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
114 non_text_state_update_count: usize,
115 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
116 remote_selections: TreeMap<ReplicaId, SelectionSet>,
117 diagnostics_timestamp: clock::Lamport,
118 completion_triggers: BTreeSet<String>,
119 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
120 completion_triggers_timestamp: clock::Lamport,
121 deferred_ops: OperationQueue<Operation>,
122 capability: Capability,
123 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the last call.
126 has_unsaved_edits: Cell<(clock::Global, bool)>,
127 change_bits: Vec<rc::Weak<Cell<bool>>>,
128 _subscriptions: Vec<gpui::Subscription>,
129}
130
131#[derive(Copy, Clone, Debug, PartialEq, Eq)]
132pub enum ParseStatus {
133 Idle,
134 Parsing,
135}
136
137struct BufferBranchState {
138 base_buffer: Entity<Buffer>,
139 merged_operations: Vec<Lamport>,
140}
141
142/// An immutable, cheaply cloneable representation of a fixed
143/// state of a buffer.
144pub struct BufferSnapshot {
145 pub text: text::BufferSnapshot,
146 pub(crate) syntax: SyntaxSnapshot,
147 file: Option<Arc<dyn File>>,
148 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
149 remote_selections: TreeMap<ReplicaId, SelectionSet>,
150 language: Option<Arc<Language>>,
151 non_text_state_update_count: usize,
152}
153
154/// The kind and amount of indentation in a particular line. For now,
155/// assumes that indentation is all the same character.
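///
/// For example, `IndentSize { len: 4, kind: IndentKind::Space }` describes a
/// four-space indent, and `IndentSize { len: 1, kind: IndentKind::Tab }`
/// describes a single tab.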
156#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
157pub struct IndentSize {
158 /// The number of bytes that comprise the indentation.
159 pub len: u32,
160 /// The kind of whitespace used for indentation.
161 pub kind: IndentKind,
162}
163
164/// A whitespace character that's used for indentation.
165#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
166pub enum IndentKind {
167 /// An ASCII space character.
168 #[default]
169 Space,
170 /// An ASCII tab character.
171 Tab,
172}
173
174/// The shape of a selection cursor.
175#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
176#[serde(rename_all = "snake_case")]
177pub enum CursorShape {
178 /// A vertical bar
179 #[default]
180 Bar,
181 /// A block that surrounds the following character
182 Block,
183 /// An underline that runs along the following character
184 Underline,
185 /// A box drawn around the following character
186 Hollow,
187}
188
189#[derive(Clone, Debug)]
190struct SelectionSet {
191 line_mode: bool,
192 cursor_shape: CursorShape,
193 selections: Arc<[Selection<Anchor>]>,
194 lamport_timestamp: clock::Lamport,
195}
196
197/// A diagnostic associated with a certain range of a buffer.
198#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
199pub struct Diagnostic {
200 /// The name of the service that produced this diagnostic.
201 pub source: Option<String>,
202 /// A machine-readable code that identifies this diagnostic.
203 pub code: Option<NumberOrString>,
204 pub code_description: Option<lsp::Url>,
205 /// Whether this diagnostic is a hint, warning, or error.
206 pub severity: DiagnosticSeverity,
207 /// The human-readable message associated with this diagnostic.
208 pub message: String,
    /// The human-readable message, in Markdown format, if available.
210 pub markdown: Option<String>,
211 /// An id that identifies the group to which this diagnostic belongs.
212 ///
213 /// When a language server produces a diagnostic with
214 /// one or more associated diagnostics, those diagnostics are all
215 /// assigned a single group ID.
216 pub group_id: usize,
217 /// Whether this diagnostic is the primary diagnostic for its group.
218 ///
219 /// In a given group, the primary diagnostic is the top-level diagnostic
220 /// returned by the language server. The non-primary diagnostics are the
221 /// associated diagnostics.
222 pub is_primary: bool,
223 /// Whether this diagnostic is considered to originate from an analysis of
224 /// files on disk, as opposed to any unsaved buffer contents. This is a
225 /// property of a given diagnostic source, and is configured for a given
226 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
227 /// for the language server.
228 pub is_disk_based: bool,
229 /// Whether this diagnostic marks unnecessary code.
230 pub is_unnecessary: bool,
    /// A coarse categorization of the diagnostic by its source, used to quickly
    /// separate diagnostic groups.
    pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back
    /// to the language server when we request code actions for this diagnostic.
    pub data: Option<Value>,
235 /// Whether to underline the corresponding text range in the editor.
236 pub underline: bool,
237}
238
239#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
240pub enum DiagnosticSourceKind {
241 Pulled,
242 Pushed,
243 Other,
244}
245
246/// An operation used to synchronize this buffer with its other replicas.
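///
/// A minimal sketch (not compiled here) of forwarding an operation emitted by
/// one replica to another, assuming both are `Entity<Buffer>` handles:
///
/// ```ignore
/// // `operation` was received from a `BufferEvent::Operation` event.
/// remote_buffer.update(cx, |buffer, cx| buffer.apply_ops([operation], cx));
/// ```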
247#[derive(Clone, Debug, PartialEq)]
248pub enum Operation {
249 /// A text operation.
250 Buffer(text::Operation),
251
252 /// An update to the buffer's diagnostics.
253 UpdateDiagnostics {
254 /// The id of the language server that produced the new diagnostics.
255 server_id: LanguageServerId,
256 /// The diagnostics.
257 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
258 /// The buffer's lamport timestamp.
259 lamport_timestamp: clock::Lamport,
260 },
261
262 /// An update to the most recent selections in this buffer.
263 UpdateSelections {
264 /// The selections.
265 selections: Arc<[Selection<Anchor>]>,
266 /// The buffer's lamport timestamp.
267 lamport_timestamp: clock::Lamport,
268 /// Whether the selections are in 'line mode'.
269 line_mode: bool,
270 /// The [`CursorShape`] associated with these selections.
271 cursor_shape: CursorShape,
272 },
273
274 /// An update to the characters that should trigger autocompletion
275 /// for this buffer.
276 UpdateCompletionTriggers {
277 /// The characters that trigger autocompletion.
278 triggers: Vec<String>,
279 /// The buffer's lamport timestamp.
280 lamport_timestamp: clock::Lamport,
281 /// The language server ID.
282 server_id: LanguageServerId,
283 },
284}
285
286/// An event that occurs in a buffer.
287#[derive(Clone, Debug, PartialEq)]
288pub enum BufferEvent {
289 /// The buffer was changed in a way that must be
290 /// propagated to its other replicas.
291 Operation {
292 operation: Operation,
293 is_local: bool,
294 },
295 /// The buffer was edited.
296 Edited,
297 /// The buffer's `dirty` bit changed.
298 DirtyChanged,
299 /// The buffer was saved.
300 Saved,
301 /// The buffer's file was changed on disk.
302 FileHandleChanged,
303 /// The buffer was reloaded.
304 Reloaded,
    /// The buffer needs to be reloaded.
306 ReloadNeeded,
307 /// The buffer's language was changed.
308 LanguageChanged,
309 /// The buffer's syntax trees were updated.
310 Reparsed,
311 /// The buffer's diagnostics were updated.
312 DiagnosticsUpdated,
313 /// The buffer gained or lost editing capabilities.
314 CapabilityChanged,
315 /// The buffer was explicitly requested to close.
316 Closed,
317 /// The buffer was discarded when closing.
318 Discarded,
319}
320
321/// The file associated with a buffer.
322pub trait File: Send + Sync + Any {
323 /// Returns the [`LocalFile`] associated with this file, if the
324 /// file is local.
325 fn as_local(&self) -> Option<&dyn LocalFile>;
326
327 /// Returns whether this file is local.
328 fn is_local(&self) -> bool {
329 self.as_local().is_some()
330 }
331
332 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
333 /// only available in some states, such as modification time.
334 fn disk_state(&self) -> DiskState;
335
336 /// Returns the path of this file relative to the worktree's root directory.
337 fn path(&self) -> &Arc<Path>;
338
339 /// Returns the path of this file relative to the worktree's parent directory (this means it
340 /// includes the name of the worktree's root folder).
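    ///
    /// For example, a file at `src/lib.rs` in a worktree whose root folder is
    /// named `zed` has a full path of `zed/src/lib.rs`.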
341 fn full_path(&self, cx: &App) -> PathBuf;
342
343 /// Returns the last component of this handle's absolute path. If this handle refers to the root
344 /// of its worktree, then this method will return the name of the worktree itself.
345 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
346
347 /// Returns the id of the worktree to which this file belongs.
348 ///
349 /// This is needed for looking up project-specific settings.
350 fn worktree_id(&self, cx: &App) -> WorktreeId;
351
352 /// Converts this file into a protobuf message.
353 fn to_proto(&self, cx: &App) -> rpc::proto::File;
354
    /// Returns whether Zed considers this to be a private file.
356 fn is_private(&self) -> bool;
357}
358
359/// The file's storage status - whether it's stored (`Present`), and if so when it was last
360/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
361/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
362/// indicator for new files.
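///
/// A minimal sketch (not compiled here) of branching on a file's disk state:
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => println!("never saved to disk"),
///     DiskState::Present { mtime } => println!("last modified at {mtime:?}"),
///     DiskState::Deleted => println!("deleted from disk"),
/// }
/// ```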
363#[derive(Copy, Clone, Debug, PartialEq)]
364pub enum DiskState {
365 /// File created in Zed that has not been saved.
366 New,
367 /// File present on the filesystem.
368 Present { mtime: MTime },
369 /// Deleted file that was previously present.
370 Deleted,
371}
372
373impl DiskState {
374 /// Returns the file's last known modification time on disk.
375 pub fn mtime(self) -> Option<MTime> {
376 match self {
377 DiskState::New => None,
378 DiskState::Present { mtime } => Some(mtime),
379 DiskState::Deleted => None,
380 }
381 }
382
    /// Returns whether the file currently exists on disk.
    pub fn exists(&self) -> bool {
384 match self {
385 DiskState::New => false,
386 DiskState::Present { .. } => true,
387 DiskState::Deleted => false,
388 }
389 }
390}
391
392/// The file associated with a buffer, in the case where the file is on the local disk.
393pub trait LocalFile: File {
    /// Returns the absolute path of this file.
395 fn abs_path(&self, cx: &App) -> PathBuf;
396
397 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
398 fn load(&self, cx: &App) -> Task<Result<String>>;
399
400 /// Loads the file's contents from disk.
401 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
402}
403
404/// The auto-indent behavior associated with an editing operation.
405/// For some editing operations, each affected line of text has its
406/// indentation recomputed. For other operations, the entire block
407/// of edited text is adjusted uniformly.
408#[derive(Clone, Debug)]
409pub enum AutoindentMode {
410 /// Indent each line of inserted text.
411 EachLine,
412 /// Apply the same indentation adjustment to all of the lines
413 /// in a given insertion.
414 Block {
415 /// The original indentation column of the first line of each
416 /// insertion, if it has been copied.
417 ///
418 /// Knowing this makes it possible to preserve the relative indentation
419 /// of every line in the insertion from when it was copied.
420 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a` columns.
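        ///
        /// For example, if a block was copied from indent column 4 (`a = 4`) and
        /// its first line is auto-indented to column 8 (`b = 8`), every other
        /// line of the insertion is shifted right by 4 columns.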
424 original_indent_columns: Vec<Option<u32>>,
425 },
426}
427
428#[derive(Clone)]
429struct AutoindentRequest {
430 before_edit: BufferSnapshot,
431 entries: Vec<AutoindentRequestEntry>,
432 is_block_mode: bool,
433 ignore_empty_lines: bool,
434}
435
436#[derive(Debug, Clone)]
437struct AutoindentRequestEntry {
438 /// A range of the buffer whose indentation should be adjusted.
439 range: Range<Anchor>,
440 /// Whether or not these lines should be considered brand new, for the
441 /// purpose of auto-indent. When text is not new, its indentation will
442 /// only be adjusted if the suggested indentation level has *changed*
443 /// since the edit was made.
444 first_line_is_new: bool,
445 indent_size: IndentSize,
446 original_indent_column: Option<u32>,
447}
448
449#[derive(Debug)]
450struct IndentSuggestion {
451 basis_row: u32,
452 delta: Ordering,
453 within_error: bool,
454}
455
456struct BufferChunkHighlights<'a> {
457 captures: SyntaxMapCaptures<'a>,
458 next_capture: Option<SyntaxMapCapture<'a>>,
459 stack: Vec<(usize, HighlightId)>,
460 highlight_maps: Vec<HighlightMap>,
461}
462
463/// An iterator that yields chunks of a buffer's text, along with their
464/// syntax highlights and diagnostic status.
465pub struct BufferChunks<'a> {
466 buffer_snapshot: Option<&'a BufferSnapshot>,
467 range: Range<usize>,
468 chunks: text::Chunks<'a>,
469 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
470 error_depth: usize,
471 warning_depth: usize,
472 information_depth: usize,
473 hint_depth: usize,
474 unnecessary_depth: usize,
475 underline: bool,
476 highlights: Option<BufferChunkHighlights<'a>>,
477}
478
479/// A chunk of a buffer's text, along with its syntax highlight and
480/// diagnostic status.
481#[derive(Clone, Debug, Default)]
482pub struct Chunk<'a> {
483 /// The text of the chunk.
484 pub text: &'a str,
485 /// The syntax highlighting style of the chunk.
486 pub syntax_highlight_id: Option<HighlightId>,
487 /// The highlight style that has been applied to this chunk in
488 /// the editor.
489 pub highlight_style: Option<HighlightStyle>,
490 /// The severity of diagnostic associated with this chunk, if any.
491 pub diagnostic_severity: Option<DiagnosticSeverity>,
492 /// Whether this chunk of text is marked as unnecessary.
493 pub is_unnecessary: bool,
494 /// Whether this chunk of text was originally a tab character.
495 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay rather than the
    /// buffer's own text.
    pub is_inlay: bool,
498 /// Whether to underline the corresponding text range in the editor.
499 pub underline: bool,
500}
501
502/// A set of edits to a given version of a buffer, computed asynchronously.
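///
/// A sketch (not compiled here) mirroring the flow used by [`Buffer::reload`],
/// assuming an async task that holds a handle to the buffer: compute the diff
/// against new text in the background, then apply it.
///
/// ```ignore
/// let diff = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx))?.await;
/// buffer.update(cx, |buffer, cx| {
///     buffer.apply_diff(diff, cx);
/// })?;
/// ```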
503#[derive(Debug)]
504pub struct Diff {
505 pub base_version: clock::Global,
506 pub line_ending: LineEnding,
507 pub edits: Vec<(Range<usize>, Arc<str>)>,
508}
509
510#[derive(Debug, Clone, Copy)]
511pub(crate) struct DiagnosticEndpoint {
512 offset: usize,
513 is_start: bool,
514 underline: bool,
515 severity: DiagnosticSeverity,
516 is_unnecessary: bool,
517}
518
519/// A class of characters, used for characterizing a run of text.
520#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
521pub enum CharKind {
522 /// Whitespace.
523 Whitespace,
524 /// Punctuation.
525 Punctuation,
526 /// Word.
527 Word,
528}
529
/// A runnable is a set of data about a region of a buffer that can be resolved into a task.
531pub struct Runnable {
532 pub tags: SmallVec<[RunnableTag; 1]>,
533 pub language: Arc<Language>,
534 pub buffer: BufferId,
535}
536
537#[derive(Default, Clone, Debug)]
538pub struct HighlightedText {
539 pub text: SharedString,
540 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
541}
542
543#[derive(Default, Debug)]
544struct HighlightedTextBuilder {
545 pub text: String,
546 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
547}
548
549impl HighlightedText {
550 pub fn from_buffer_range<T: ToOffset>(
551 range: Range<T>,
552 snapshot: &text::BufferSnapshot,
553 syntax_snapshot: &SyntaxSnapshot,
554 override_style: Option<HighlightStyle>,
555 syntax_theme: &SyntaxTheme,
556 ) -> Self {
557 let mut highlighted_text = HighlightedTextBuilder::default();
558 highlighted_text.add_text_from_buffer_range(
559 range,
560 snapshot,
561 syntax_snapshot,
562 override_style,
563 syntax_theme,
564 );
565 highlighted_text.build()
566 }
567
568 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
569 gpui::StyledText::new(self.text.clone())
570 .with_default_highlights(default_style, self.highlights.iter().cloned())
571 }
572
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within it, and a boolean indicating whether more lines follow.
575 pub fn first_line_preview(self) -> (Self, bool) {
576 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
577 let first_line = &self.text[..newline_ix];
578
579 // Trim leading whitespace, unless an edit starts prior to it.
580 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
581 if let Some((first_highlight_range, _)) = self.highlights.first() {
582 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
583 }
584
585 let preview_text = &first_line[preview_start_ix..];
586 let preview_highlights = self
587 .highlights
588 .into_iter()
589 .take_while(|(range, _)| range.start < newline_ix)
590 .filter_map(|(mut range, highlight)| {
591 range.start = range.start.saturating_sub(preview_start_ix);
592 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
593 if range.is_empty() {
594 None
595 } else {
596 Some((range, highlight))
597 }
598 });
599
600 let preview = Self {
601 text: SharedString::new(preview_text),
602 highlights: preview_highlights.collect(),
603 };
604
605 (preview, self.text.len() > newline_ix)
606 }
607}
608
609impl HighlightedTextBuilder {
610 pub fn build(self) -> HighlightedText {
611 HighlightedText {
612 text: self.text.into(),
613 highlights: self.highlights,
614 }
615 }
616
617 pub fn add_text_from_buffer_range<T: ToOffset>(
618 &mut self,
619 range: Range<T>,
620 snapshot: &text::BufferSnapshot,
621 syntax_snapshot: &SyntaxSnapshot,
622 override_style: Option<HighlightStyle>,
623 syntax_theme: &SyntaxTheme,
624 ) {
625 let range = range.to_offset(snapshot);
626 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
627 let start = self.text.len();
628 self.text.push_str(chunk.text);
629 let end = self.text.len();
630
631 if let Some(mut highlight_style) = chunk
632 .syntax_highlight_id
633 .and_then(|id| id.style(syntax_theme))
634 {
635 if let Some(override_style) = override_style {
636 highlight_style.highlight(override_style);
637 }
638 self.highlights.push((start..end, highlight_style));
639 } else if let Some(override_style) = override_style {
640 self.highlights.push((start..end, override_style));
641 }
642 }
643 }
644
645 fn highlighted_chunks<'a>(
646 range: Range<usize>,
647 snapshot: &'a text::BufferSnapshot,
648 syntax_snapshot: &'a SyntaxSnapshot,
649 ) -> BufferChunks<'a> {
650 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
651 grammar.highlights_query.as_ref()
652 });
653
654 let highlight_maps = captures
655 .grammars()
656 .iter()
657 .map(|grammar| grammar.highlight_map())
658 .collect();
659
660 BufferChunks::new(
661 snapshot.as_rope(),
662 range,
663 Some((captures, highlight_maps)),
664 false,
665 None,
666 )
667 }
668}
669
670#[derive(Clone)]
671pub struct EditPreview {
672 old_snapshot: text::BufferSnapshot,
673 applied_edits_snapshot: text::BufferSnapshot,
674 syntax_snapshot: SyntaxSnapshot,
675}
676
677impl EditPreview {
678 pub fn highlight_edits(
679 &self,
680 current_snapshot: &BufferSnapshot,
681 edits: &[(Range<Anchor>, String)],
682 include_deletions: bool,
683 cx: &App,
684 ) -> HighlightedText {
685 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
686 return HighlightedText::default();
687 };
688
689 let mut highlighted_text = HighlightedTextBuilder::default();
690
691 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
692
693 let insertion_highlight_style = HighlightStyle {
694 background_color: Some(cx.theme().status().created_background),
695 ..Default::default()
696 };
697 let deletion_highlight_style = HighlightStyle {
698 background_color: Some(cx.theme().status().deleted_background),
699 ..Default::default()
700 };
701 let syntax_theme = cx.theme().syntax();
702
703 for (range, edit_text) in edits {
704 let edit_new_end_in_preview_snapshot = range
705 .end
706 .bias_right(&self.old_snapshot)
707 .to_offset(&self.applied_edits_snapshot);
708 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
709
710 let unchanged_range_in_preview_snapshot =
711 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
712 if !unchanged_range_in_preview_snapshot.is_empty() {
713 highlighted_text.add_text_from_buffer_range(
714 unchanged_range_in_preview_snapshot,
715 &self.applied_edits_snapshot,
716 &self.syntax_snapshot,
717 None,
718 &syntax_theme,
719 );
720 }
721
722 let range_in_current_snapshot = range.to_offset(current_snapshot);
723 if include_deletions && !range_in_current_snapshot.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
728 Some(deletion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 if !edit_text.is_empty() {
734 highlighted_text.add_text_from_buffer_range(
735 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
736 &self.applied_edits_snapshot,
737 &self.syntax_snapshot,
738 Some(insertion_highlight_style),
739 &syntax_theme,
740 );
741 }
742
743 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
744 }
745
746 highlighted_text.add_text_from_buffer_range(
747 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
748 &self.applied_edits_snapshot,
749 &self.syntax_snapshot,
750 None,
751 &syntax_theme,
752 );
753
754 highlighted_text.build()
755 }
756
757 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
758 let (first, _) = edits.first()?;
759 let (last, _) = edits.last()?;
760
761 let start = first
762 .start
763 .bias_left(&self.old_snapshot)
764 .to_point(&self.applied_edits_snapshot);
765 let end = last
766 .end
767 .bias_right(&self.old_snapshot)
768 .to_point(&self.applied_edits_snapshot);
769
770 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
771 let range = Point::new(start.row, 0)
772 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
773
774 Some(range.to_offset(&self.applied_edits_snapshot))
775 }
776}
777
778#[derive(Clone, Debug, PartialEq, Eq)]
779pub struct BracketMatch {
780 pub open_range: Range<usize>,
781 pub close_range: Range<usize>,
782 pub newline_only: bool,
783}
784
785impl Buffer {
786 /// Create a new buffer with the given base text.
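    ///
    /// A minimal sketch (not compiled here), assuming a gpui context that can
    /// create entities:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```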
787 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
788 Self::build(
789 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
790 None,
791 Capability::ReadWrite,
792 )
793 }
794
795 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
796 pub fn local_normalized(
797 base_text_normalized: Rope,
798 line_ending: LineEnding,
799 cx: &Context<Self>,
800 ) -> Self {
801 Self::build(
802 TextBuffer::new_normalized(
803 0,
804 cx.entity_id().as_non_zero_u64().into(),
805 line_ending,
806 base_text_normalized,
807 ),
808 None,
809 Capability::ReadWrite,
810 )
811 }
812
813 /// Create a new buffer that is a replica of a remote buffer.
814 pub fn remote(
815 remote_id: BufferId,
816 replica_id: ReplicaId,
817 capability: Capability,
818 base_text: impl Into<String>,
819 ) -> Self {
820 Self::build(
821 TextBuffer::new(replica_id, remote_id, base_text.into()),
822 None,
823 capability,
824 )
825 }
826
827 /// Create a new buffer that is a replica of a remote buffer, populating its
828 /// state from the given protobuf message.
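    ///
    /// A minimal sketch (not compiled here) of round-tripping a buffer through
    /// its protobuf state; the replica id and capability here are placeholders:
    ///
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let replica = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```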
829 pub fn from_proto(
830 replica_id: ReplicaId,
831 capability: Capability,
832 message: proto::BufferState,
833 file: Option<Arc<dyn File>>,
834 ) -> Result<Self> {
835 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
836 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
837 let mut this = Self::build(buffer, file, capability);
838 this.text.set_line_ending(proto::deserialize_line_ending(
839 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
840 ));
841 this.saved_version = proto::deserialize_version(&message.saved_version);
842 this.saved_mtime = message.saved_mtime.map(|time| time.into());
843 Ok(this)
844 }
845
846 /// Serialize the buffer's state to a protobuf message.
847 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
848 proto::BufferState {
849 id: self.remote_id().into(),
850 file: self.file.as_ref().map(|f| f.to_proto(cx)),
851 base_text: self.base_text().to_string(),
852 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
853 saved_version: proto::serialize_version(&self.saved_version),
854 saved_mtime: self.saved_mtime.map(|time| time.into()),
855 }
856 }
857
858 /// Serialize as protobufs all of the changes to the buffer since the given version.
859 pub fn serialize_ops(
860 &self,
861 since: Option<clock::Global>,
862 cx: &App,
863 ) -> Task<Vec<proto::Operation>> {
864 let mut operations = Vec::new();
865 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
866
867 operations.extend(self.remote_selections.iter().map(|(_, set)| {
868 proto::serialize_operation(&Operation::UpdateSelections {
869 selections: set.selections.clone(),
870 lamport_timestamp: set.lamport_timestamp,
871 line_mode: set.line_mode,
872 cursor_shape: set.cursor_shape,
873 })
874 }));
875
876 for (server_id, diagnostics) in &self.diagnostics {
877 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
878 lamport_timestamp: self.diagnostics_timestamp,
879 server_id: *server_id,
880 diagnostics: diagnostics.iter().cloned().collect(),
881 }));
882 }
883
884 for (server_id, completions) in &self.completion_triggers_per_language_server {
885 operations.push(proto::serialize_operation(
886 &Operation::UpdateCompletionTriggers {
887 triggers: completions.iter().cloned().collect(),
888 lamport_timestamp: self.completion_triggers_timestamp,
889 server_id: *server_id,
890 },
891 ));
892 }
893
894 let text_operations = self.text.operations().clone();
895 cx.background_spawn(async move {
896 let since = since.unwrap_or_default();
897 operations.extend(
898 text_operations
899 .iter()
900 .filter(|(_, op)| !since.observed(op.timestamp()))
901 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
902 );
903 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
904 operations
905 })
906 }
907
908 /// Assign a language to the buffer, returning the buffer.
909 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
910 self.set_language(Some(language), cx);
911 self
912 }
913
914 /// Returns the [`Capability`] of this buffer.
915 pub fn capability(&self) -> Capability {
916 self.capability
917 }
918
919 /// Whether this buffer can only be read.
920 pub fn read_only(&self) -> bool {
921 self.capability == Capability::ReadOnly
922 }
923
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
925 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
926 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
927 let snapshot = buffer.snapshot();
928 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
929 Self {
930 saved_mtime,
931 saved_version: buffer.version(),
932 preview_version: buffer.version(),
933 reload_task: None,
934 transaction_depth: 0,
935 was_dirty_before_starting_transaction: None,
936 has_unsaved_edits: Cell::new((buffer.version(), false)),
937 text: buffer,
938 branch_state: None,
939 file,
940 capability,
941 syntax_map,
942 reparse: None,
943 non_text_state_update_count: 0,
944 sync_parse_timeout: Duration::from_millis(1),
945 parse_status: watch::channel(ParseStatus::Idle),
946 autoindent_requests: Default::default(),
947 pending_autoindent: Default::default(),
948 language: None,
949 remote_selections: Default::default(),
950 diagnostics: Default::default(),
951 diagnostics_timestamp: Default::default(),
952 completion_triggers: Default::default(),
953 completion_triggers_per_language_server: Default::default(),
954 completion_triggers_timestamp: Default::default(),
955 deferred_ops: OperationQueue::new(),
956 has_conflict: false,
957 change_bits: Default::default(),
958 _subscriptions: Vec::new(),
959 }
960 }
961
962 pub fn build_snapshot(
963 text: Rope,
964 language: Option<Arc<Language>>,
965 language_registry: Option<Arc<LanguageRegistry>>,
966 cx: &mut App,
967 ) -> impl Future<Output = BufferSnapshot> + use<> {
968 let entity_id = cx.reserve_entity::<Self>().entity_id();
969 let buffer_id = entity_id.as_non_zero_u64().into();
970 async move {
971 let text =
972 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
973 let mut syntax = SyntaxMap::new(&text).snapshot();
974 if let Some(language) = language.clone() {
975 let text = text.clone();
976 let language = language.clone();
977 let language_registry = language_registry.clone();
978 syntax.reparse(&text, language_registry, language);
979 }
980 BufferSnapshot {
981 text,
982 syntax,
983 file: None,
984 diagnostics: Default::default(),
985 remote_selections: Default::default(),
986 language,
987 non_text_state_update_count: 0,
988 }
989 }
990 }
991
992 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
993 let entity_id = cx.reserve_entity::<Self>().entity_id();
994 let buffer_id = entity_id.as_non_zero_u64().into();
995 let text =
996 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
997 let syntax = SyntaxMap::new(&text).snapshot();
998 BufferSnapshot {
999 text,
1000 syntax,
1001 file: None,
1002 diagnostics: Default::default(),
1003 remote_selections: Default::default(),
1004 language: None,
1005 non_text_state_update_count: 0,
1006 }
1007 }
1008
1009 #[cfg(any(test, feature = "test-support"))]
1010 pub fn build_snapshot_sync(
1011 text: Rope,
1012 language: Option<Arc<Language>>,
1013 language_registry: Option<Arc<LanguageRegistry>>,
1014 cx: &mut App,
1015 ) -> BufferSnapshot {
1016 let entity_id = cx.reserve_entity::<Self>().entity_id();
1017 let buffer_id = entity_id.as_non_zero_u64().into();
1018 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1019 let mut syntax = SyntaxMap::new(&text).snapshot();
1020 if let Some(language) = language.clone() {
1021 let text = text.clone();
1022 let language = language.clone();
1023 let language_registry = language_registry.clone();
1024 syntax.reparse(&text, language_registry, language);
1025 }
1026 BufferSnapshot {
1027 text,
1028 syntax,
1029 file: None,
1030 diagnostics: Default::default(),
1031 remote_selections: Default::default(),
1032 language,
1033 non_text_state_update_count: 0,
1034 }
1035 }
1036
1037 /// Retrieve a snapshot of the buffer's current state. This is computationally
1038 /// cheap, and allows reading from the buffer on a background thread.
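    ///
    /// A minimal sketch (not compiled here): capture a snapshot on the main
    /// thread, then read from it on a background task.
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only work against `snapshot`, off the main thread.
    ///     let first_line_len = snapshot.line_len(0);
    /// })
    /// .detach();
    /// ```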
1039 pub fn snapshot(&self) -> BufferSnapshot {
1040 let text = self.text.snapshot();
1041 let mut syntax_map = self.syntax_map.lock();
1042 syntax_map.interpolate(&text);
1043 let syntax = syntax_map.snapshot();
1044
1045 BufferSnapshot {
1046 text,
1047 syntax,
1048 file: self.file.clone(),
1049 remote_selections: self.remote_selections.clone(),
1050 diagnostics: self.diagnostics.clone(),
1051 language: self.language.clone(),
1052 non_text_state_update_count: self.non_text_state_update_count,
1053 }
1054 }
1055
1056 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1057 let this = cx.entity();
1058 cx.new(|cx| {
1059 let mut branch = Self {
1060 branch_state: Some(BufferBranchState {
1061 base_buffer: this.clone(),
1062 merged_operations: Default::default(),
1063 }),
1064 language: self.language.clone(),
1065 has_conflict: self.has_conflict,
1066 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1067 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1068 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1069 };
1070 if let Some(language_registry) = self.language_registry() {
1071 branch.set_language_registry(language_registry);
1072 }
1073
1074 // Reparse the branch buffer so that we get syntax highlighting immediately.
1075 branch.reparse(cx);
1076
1077 branch
1078 })
1079 }
1080
1081 pub fn preview_edits(
1082 &self,
1083 edits: Arc<[(Range<Anchor>, String)]>,
1084 cx: &App,
1085 ) -> Task<EditPreview> {
1086 let registry = self.language_registry();
1087 let language = self.language().cloned();
1088 let old_snapshot = self.text.snapshot();
1089 let mut branch_buffer = self.text.branch();
1090 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1091 cx.background_spawn(async move {
1092 if !edits.is_empty() {
1093 if let Some(language) = language.clone() {
1094 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1095 }
1096
1097 branch_buffer.edit(edits.iter().cloned());
1098 let snapshot = branch_buffer.snapshot();
1099 syntax_snapshot.interpolate(&snapshot);
1100
1101 if let Some(language) = language {
1102 syntax_snapshot.reparse(&snapshot, registry, language);
1103 }
1104 }
1105 EditPreview {
1106 old_snapshot,
1107 applied_edits_snapshot: branch_buffer.snapshot(),
1108 syntax_snapshot,
1109 }
1110 })
1111 }
1112
1113 /// Applies all of the changes in this buffer that intersect any of the
1114 /// given `ranges` to its base buffer.
1115 ///
1116 /// If `ranges` is empty, then all changes will be applied. This buffer must
1117 /// be a branch buffer to call this method.
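    ///
    /// A minimal sketch (not compiled here): apply every edit from a branch
    /// buffer back to its base.
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| branch.merge_into_base(Vec::new(), cx));
    /// ```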
1118 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1119 let Some(base_buffer) = self.base_buffer() else {
1120 debug_panic!("not a branch buffer");
1121 return;
1122 };
1123
1124 let mut ranges = if ranges.is_empty() {
1125 &[0..usize::MAX]
1126 } else {
1127 ranges.as_slice()
1128 }
1129 .into_iter()
1130 .peekable();
1131
1132 let mut edits = Vec::new();
1133 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1134 let mut is_included = false;
1135 while let Some(range) = ranges.peek() {
1136 if range.end < edit.new.start {
1137 ranges.next().unwrap();
1138 } else {
1139 if range.start <= edit.new.end {
1140 is_included = true;
1141 }
1142 break;
1143 }
1144 }
1145
1146 if is_included {
1147 edits.push((
1148 edit.old.clone(),
1149 self.text_for_range(edit.new.clone()).collect::<String>(),
1150 ));
1151 }
1152 }
1153
1154 let operation = base_buffer.update(cx, |base_buffer, cx| {
1155 // cx.emit(BufferEvent::DiffBaseChanged);
1156 base_buffer.edit(edits, None, cx)
1157 });
1158
1159 if let Some(operation) = operation {
1160 if let Some(BufferBranchState {
1161 merged_operations, ..
1162 }) = &mut self.branch_state
1163 {
1164 merged_operations.push(operation);
1165 }
1166 }
1167 }
1168
1169 fn on_base_buffer_event(
1170 &mut self,
1171 _: Entity<Buffer>,
1172 event: &BufferEvent,
1173 cx: &mut Context<Self>,
1174 ) {
1175 let BufferEvent::Operation { operation, .. } = event else {
1176 return;
1177 };
1178 let Some(BufferBranchState {
1179 merged_operations, ..
1180 }) = &mut self.branch_state
1181 else {
1182 return;
1183 };
1184
1185 let mut operation_to_undo = None;
1186 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1187 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1188 merged_operations.remove(ix);
1189 operation_to_undo = Some(operation.timestamp);
1190 }
1191 }
1192
1193 self.apply_ops([operation.clone()], cx);
1194
1195 if let Some(timestamp) = operation_to_undo {
1196 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1197 self.undo_operations(counts, cx);
1198 }
1199 }
1200
1201 #[cfg(test)]
1202 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1203 &self.text
1204 }
1205
1206 /// Retrieve a snapshot of the buffer's raw text, without any
1207 /// language-related state like the syntax tree or diagnostics.
1208 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1209 self.text.snapshot()
1210 }
1211
1212 /// The file associated with the buffer, if any.
1213 pub fn file(&self) -> Option<&Arc<dyn File>> {
1214 self.file.as_ref()
1215 }
1216
1217 /// The version of the buffer that was last saved or reloaded from disk.
1218 pub fn saved_version(&self) -> &clock::Global {
1219 &self.saved_version
1220 }
1221
1222 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1223 pub fn saved_mtime(&self) -> Option<MTime> {
1224 self.saved_mtime
1225 }
1226
1227 /// Assign a language to the buffer.
1228 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1229 self.non_text_state_update_count += 1;
1230 self.syntax_map.lock().clear(&self.text);
1231 self.language = language;
1232 self.was_changed();
1233 self.reparse(cx);
1234 cx.emit(BufferEvent::LanguageChanged);
1235 }
1236
1237 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1238 /// other languages if parts of the buffer are written in different languages.
1239 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1240 self.syntax_map
1241 .lock()
1242 .set_language_registry(language_registry);
1243 }
1244
1245 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1246 self.syntax_map.lock().language_registry()
1247 }
1248
1249 /// Assign the buffer a new [`Capability`].
1250 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1251 self.capability = capability;
1252 cx.emit(BufferEvent::CapabilityChanged)
1253 }
1254
1255 /// This method is called to signal that the buffer has been saved.
1256 pub fn did_save(
1257 &mut self,
1258 version: clock::Global,
1259 mtime: Option<MTime>,
1260 cx: &mut Context<Self>,
1261 ) {
1262 self.saved_version = version;
1263 self.has_unsaved_edits
1264 .set((self.saved_version().clone(), false));
1265 self.has_conflict = false;
1266 self.saved_mtime = mtime;
1267 self.was_changed();
1268 cx.emit(BufferEvent::Saved);
1269 cx.notify();
1270 }
1271
1272 /// This method is called to signal that the buffer has been discarded.
1273 pub fn discarded(&self, cx: &mut Context<Self>) {
1274 cx.emit(BufferEvent::Discarded);
1275 cx.notify();
1276 }
1277
1278 /// Reloads the contents of the buffer from disk.
1279 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1280 let (tx, rx) = futures::channel::oneshot::channel();
1281 let prev_version = self.text.version();
1282 self.reload_task = Some(cx.spawn(async move |this, cx| {
1283 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1284 let file = this.file.as_ref()?.as_local()?;
1285
1286 Some((file.disk_state().mtime(), file.load(cx)))
1287 })?
1288 else {
1289 return Ok(());
1290 };
1291
1292 let new_text = new_text.await?;
1293 let diff = this
1294 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1295 .await;
1296 this.update(cx, |this, cx| {
1297 if this.version() == diff.base_version {
1298 this.finalize_last_transaction();
1299 this.apply_diff(diff, cx);
1300 tx.send(this.finalize_last_transaction().cloned()).ok();
1301 this.has_conflict = false;
1302 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1303 } else {
1304 if !diff.edits.is_empty()
1305 || this
1306 .edits_since::<usize>(&diff.base_version)
1307 .next()
1308 .is_some()
1309 {
1310 this.has_conflict = true;
1311 }
1312
1313 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1314 }
1315
1316 this.reload_task.take();
1317 })
1318 }));
1319 rx
1320 }
1321
1322 /// This method is called to signal that the buffer has been reloaded.
1323 pub fn did_reload(
1324 &mut self,
1325 version: clock::Global,
1326 line_ending: LineEnding,
1327 mtime: Option<MTime>,
1328 cx: &mut Context<Self>,
1329 ) {
1330 self.saved_version = version;
1331 self.has_unsaved_edits
1332 .set((self.saved_version.clone(), false));
1333 self.text.set_line_ending(line_ending);
1334 self.saved_mtime = mtime;
1335 cx.emit(BufferEvent::Reloaded);
1336 cx.notify();
1337 }
1338
1339 /// Updates the [`File`] backing this buffer. This should be called when
1340 /// the file has changed or has been deleted.
1341 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1342 let was_dirty = self.is_dirty();
1343 let mut file_changed = false;
1344
1345 if let Some(old_file) = self.file.as_ref() {
1346 if new_file.path() != old_file.path() {
1347 file_changed = true;
1348 }
1349
1350 let old_state = old_file.disk_state();
1351 let new_state = new_file.disk_state();
1352 if old_state != new_state {
1353 file_changed = true;
1354 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1355 cx.emit(BufferEvent::ReloadNeeded)
1356 }
1357 }
1358 } else {
1359 file_changed = true;
1360 };
1361
1362 self.file = Some(new_file);
1363 if file_changed {
1364 self.was_changed();
1365 self.non_text_state_update_count += 1;
1366 if was_dirty != self.is_dirty() {
1367 cx.emit(BufferEvent::DirtyChanged);
1368 }
1369 cx.emit(BufferEvent::FileHandleChanged);
1370 cx.notify();
1371 }
1372 }
1373
1374 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1375 Some(self.branch_state.as_ref()?.base_buffer.clone())
1376 }
1377
1378 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1379 pub fn language(&self) -> Option<&Arc<Language>> {
1380 self.language.as_ref()
1381 }
1382
1383 /// Returns the [`Language`] at the given location.
1384 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1385 let offset = position.to_offset(self);
1386 let mut is_first = true;
1387 let start_anchor = self.anchor_before(offset);
1388 let end_anchor = self.anchor_after(offset);
1389 self.syntax_map
1390 .lock()
1391 .layers_for_range(offset..offset, &self.text, false)
1392 .filter(|layer| {
1393 if is_first {
1394 is_first = false;
1395 return true;
1396 }
1397 let any_sub_ranges_contain_range = layer
1398 .included_sub_ranges
1399 .map(|sub_ranges| {
1400 sub_ranges.iter().any(|sub_range| {
1401 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1402 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1403 !is_before_start && !is_after_end
1404 })
1405 })
1406 .unwrap_or(true);
                any_sub_ranges_contain_range
1409 })
1410 .last()
1411 .map(|info| info.language.clone())
1412 .or_else(|| self.language.clone())
1413 }
1414
1415 /// Returns each [`Language`] for the active syntax layers at the given location.
1416 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1417 let offset = position.to_offset(self);
1418 let mut languages: Vec<Arc<Language>> = self
1419 .syntax_map
1420 .lock()
1421 .layers_for_range(offset..offset, &self.text, false)
1422 .map(|info| info.language.clone())
1423 .collect();
1424
1425 if languages.is_empty() {
1426 if let Some(buffer_language) = self.language() {
1427 languages.push(buffer_language.clone());
1428 }
1429 }
1430
1431 languages
1432 }
1433
1434 /// An integer version number that accounts for all updates besides
1435 /// the buffer's text itself (which is versioned via a version vector).
1436 pub fn non_text_state_update_count(&self) -> usize {
1437 self.non_text_state_update_count
1438 }
1439
1440 /// Whether the buffer is being parsed in the background.
1441 #[cfg(any(test, feature = "test-support"))]
1442 pub fn is_parsing(&self) -> bool {
1443 self.reparse.is_some()
1444 }
1445
1446 /// Indicates whether the buffer contains any regions that may be
1447 /// written in a language that hasn't been loaded yet.
1448 pub fn contains_unknown_injections(&self) -> bool {
1449 self.syntax_map.lock().contains_unknown_injections()
1450 }
1451
1452 #[cfg(test)]
1453 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1454 self.sync_parse_timeout = timeout;
1455 }
1456
1457 /// Called after an edit to synchronize the buffer's main parse tree with
1458 /// the buffer's new underlying state.
1459 ///
1460 /// Locks the syntax map and interpolates the edits since the last reparse
1461 /// into the foreground syntax tree.
1462 ///
1463 /// Then takes a stable snapshot of the syntax map before unlocking it.
1464 /// The snapshot with the interpolated edits is sent to a background thread,
1465 /// where we ask Tree-sitter to perform an incremental parse.
1466 ///
1467 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1468 /// waiting on the parse to complete. As soon as it completes, we proceed
1469 /// synchronously, unless a 1ms timeout elapses.
1470 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and return with the interpolated tree still in
    /// the foreground. When the background parse completes, it calls back into
    /// the main thread and assigns the newly parsed syntax as the buffer's
    /// foreground parse state.
1475 ///
1476 /// If the buffer or grammar changed since the start of the background parse,
1477 /// initiate an additional reparse recursively. To avoid concurrent parses
1478 /// for the same buffer, we only initiate a new parse if we are not already
1479 /// parsing in the background.
1480 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1481 if self.reparse.is_some() {
1482 return;
1483 }
1484 let language = if let Some(language) = self.language.clone() {
1485 language
1486 } else {
1487 return;
1488 };
1489
1490 let text = self.text_snapshot();
1491 let parsed_version = self.version();
1492
1493 let mut syntax_map = self.syntax_map.lock();
1494 syntax_map.interpolate(&text);
1495 let language_registry = syntax_map.language_registry();
1496 let mut syntax_snapshot = syntax_map.snapshot();
1497 drop(syntax_map);
1498
1499 let parse_task = cx.background_spawn({
1500 let language = language.clone();
1501 let language_registry = language_registry.clone();
1502 async move {
1503 syntax_snapshot.reparse(&text, language_registry, language);
1504 syntax_snapshot
1505 }
1506 });
1507
1508 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1509 match cx
1510 .background_executor()
1511 .block_with_timeout(self.sync_parse_timeout, parse_task)
1512 {
1513 Ok(new_syntax_snapshot) => {
1514 self.did_finish_parsing(new_syntax_snapshot, cx);
1515 self.reparse = None;
1516 }
1517 Err(parse_task) => {
1518 self.reparse = Some(cx.spawn(async move |this, cx| {
1519 let new_syntax_map = parse_task.await;
1520 this.update(cx, move |this, cx| {
1521 let grammar_changed =
1522 this.language.as_ref().map_or(true, |current_language| {
1523 !Arc::ptr_eq(&language, current_language)
1524 });
1525 let language_registry_changed = new_syntax_map
1526 .contains_unknown_injections()
1527 && language_registry.map_or(false, |registry| {
1528 registry.version() != new_syntax_map.language_registry_version()
1529 });
1530 let parse_again = language_registry_changed
1531 || grammar_changed
1532 || this.version.changed_since(&parsed_version);
1533 this.did_finish_parsing(new_syntax_map, cx);
1534 this.reparse = None;
1535 if parse_again {
1536 this.reparse(cx);
1537 }
1538 })
1539 .ok();
1540 }));
1541 }
1542 }
1543 }
1544
1545 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1546 self.was_changed();
1547 self.non_text_state_update_count += 1;
1548 self.syntax_map.lock().did_parse(syntax_snapshot);
1549 self.request_autoindent(cx);
1550 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1551 cx.emit(BufferEvent::Reparsed);
1552 cx.notify();
1553 }
1554
1555 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1556 self.parse_status.1.clone()
1557 }
1558
1559 /// Assign to the buffer a set of diagnostics created by a given language server.
1560 pub fn update_diagnostics(
1561 &mut self,
1562 server_id: LanguageServerId,
1563 diagnostics: DiagnosticSet,
1564 cx: &mut Context<Self>,
1565 ) {
1566 let lamport_timestamp = self.text.lamport_clock.tick();
1567 let op = Operation::UpdateDiagnostics {
1568 server_id,
1569 diagnostics: diagnostics.iter().cloned().collect(),
1570 lamport_timestamp,
1571 };
1572 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1573 self.send_operation(op, true, cx);
1574 }
1575
1576 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1577 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1578 return None;
1579 };
1580 Some(&self.diagnostics[idx].1)
1581 }
1582
1583 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1584 if let Some(indent_sizes) = self.compute_autoindents() {
1585 let indent_sizes = cx.background_spawn(indent_sizes);
1586 match cx
1587 .background_executor()
1588 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1589 {
1590 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1591 Err(indent_sizes) => {
1592 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1593 let indent_sizes = indent_sizes.await;
1594 this.update(cx, |this, cx| {
1595 this.apply_autoindents(indent_sizes, cx);
1596 })
1597 .ok();
1598 }));
1599 }
1600 }
1601 } else {
1602 self.autoindent_requests.clear();
1603 }
1604 }
1605
1606 fn compute_autoindents(
1607 &self,
1608 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1609 let max_rows_between_yields = 100;
1610 let snapshot = self.snapshot();
1611 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1612 return None;
1613 }
1614
1615 let autoindent_requests = self.autoindent_requests.clone();
1616 Some(async move {
1617 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1618 for request in autoindent_requests {
1619 // Resolve each edited range to its row in the current buffer and in the
1620 // buffer before this batch of edits.
1621 let mut row_ranges = Vec::new();
1622 let mut old_to_new_rows = BTreeMap::new();
1623 let mut language_indent_sizes_by_new_row = Vec::new();
1624 for entry in &request.entries {
1625 let position = entry.range.start;
1626 let new_row = position.to_point(&snapshot).row;
1627 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1628 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1629
1630 if !entry.first_line_is_new {
1631 let old_row = position.to_point(&request.before_edit).row;
1632 old_to_new_rows.insert(old_row, new_row);
1633 }
1634 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1635 }
1636
1637 // Build a map containing the suggested indentation for each of the edited lines
1638 // with respect to the state of the buffer before these edits. This map is keyed
1639 // by the rows for these lines in the current state of the buffer.
1640 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1641 let old_edited_ranges =
1642 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1643 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1644 let mut language_indent_size = IndentSize::default();
1645 for old_edited_range in old_edited_ranges {
1646 let suggestions = request
1647 .before_edit
1648 .suggest_autoindents(old_edited_range.clone())
1649 .into_iter()
1650 .flatten();
1651 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1652 if let Some(suggestion) = suggestion {
1653 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1654
1655 // Find the indent size based on the language for this row.
1656 while let Some((row, size)) = language_indent_sizes.peek() {
1657 if *row > new_row {
1658 break;
1659 }
1660 language_indent_size = *size;
1661 language_indent_sizes.next();
1662 }
1663
1664 let suggested_indent = old_to_new_rows
1665 .get(&suggestion.basis_row)
1666 .and_then(|from_row| {
1667 Some(old_suggestions.get(from_row).copied()?.0)
1668 })
1669 .unwrap_or_else(|| {
1670 request
1671 .before_edit
1672 .indent_size_for_line(suggestion.basis_row)
1673 })
1674 .with_delta(suggestion.delta, language_indent_size);
1675 old_suggestions
1676 .insert(new_row, (suggested_indent, suggestion.within_error));
1677 }
1678 }
1679 yield_now().await;
1680 }
1681
1682 // Compute new suggestions for each line, but only include them in the result
1683 // if they differ from the old suggestion for that line.
1684 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1685 let mut language_indent_size = IndentSize::default();
1686 for (row_range, original_indent_column) in row_ranges {
1687 let new_edited_row_range = if request.is_block_mode {
1688 row_range.start..row_range.start + 1
1689 } else {
1690 row_range.clone()
1691 };
1692
1693 let suggestions = snapshot
1694 .suggest_autoindents(new_edited_row_range.clone())
1695 .into_iter()
1696 .flatten();
1697 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1698 if let Some(suggestion) = suggestion {
1699 // Find the indent size based on the language for this row.
1700 while let Some((row, size)) = language_indent_sizes.peek() {
1701 if *row > new_row {
1702 break;
1703 }
1704 language_indent_size = *size;
1705 language_indent_sizes.next();
1706 }
1707
1708 let suggested_indent = indent_sizes
1709 .get(&suggestion.basis_row)
1710 .copied()
1711 .map(|e| e.0)
1712 .unwrap_or_else(|| {
1713 snapshot.indent_size_for_line(suggestion.basis_row)
1714 })
1715 .with_delta(suggestion.delta, language_indent_size);
1716
1717 if old_suggestions.get(&new_row).map_or(
1718 true,
1719 |(old_indentation, was_within_error)| {
1720 suggested_indent != *old_indentation
1721 && (!suggestion.within_error || *was_within_error)
1722 },
1723 ) {
1724 indent_sizes.insert(
1725 new_row,
1726 (suggested_indent, request.ignore_empty_lines),
1727 );
1728 }
1729 }
1730 }
1731
1732 if let (true, Some(original_indent_column)) =
1733 (request.is_block_mode, original_indent_column)
1734 {
1735 let new_indent =
1736 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1737 *indent
1738 } else {
1739 snapshot.indent_size_for_line(row_range.start)
1740 };
1741 let delta = new_indent.len as i64 - original_indent_column as i64;
1742 if delta != 0 {
1743 for row in row_range.skip(1) {
1744 indent_sizes.entry(row).or_insert_with(|| {
1745 let mut size = snapshot.indent_size_for_line(row);
1746 if size.kind == new_indent.kind {
1747 match delta.cmp(&0) {
1748 Ordering::Greater => size.len += delta as u32,
1749 Ordering::Less => {
1750 size.len = size.len.saturating_sub(-delta as u32)
1751 }
1752 Ordering::Equal => {}
1753 }
1754 }
1755 (size, request.ignore_empty_lines)
1756 });
1757 }
1758 }
1759 }
1760
1761 yield_now().await;
1762 }
1763 }
1764
1765 indent_sizes
1766 .into_iter()
1767 .filter_map(|(row, (indent, ignore_empty_lines))| {
1768 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1769 None
1770 } else {
1771 Some((row, indent))
1772 }
1773 })
1774 .collect()
1775 })
1776 }
1777
1778 fn apply_autoindents(
1779 &mut self,
1780 indent_sizes: BTreeMap<u32, IndentSize>,
1781 cx: &mut Context<Self>,
1782 ) {
1783 self.autoindent_requests.clear();
1784
1785 let edits: Vec<_> = indent_sizes
1786 .into_iter()
1787 .filter_map(|(row, indent_size)| {
1788 let current_size = indent_size_for_line(self, row);
1789 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1790 })
1791 .collect();
1792
1793 let preserve_preview = self.preserve_preview();
1794 self.edit(edits, None, cx);
1795 if preserve_preview {
1796 self.refresh_preview();
1797 }
1798 }
1799
1800 /// Create a minimal edit that will cause the given row to be indented
1801 /// with the given size. After applying this edit, the length of the line
1802 /// will always be at least `new_size.len`.
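    ///
    /// A minimal usage sketch (not a doctest; the row and indent sizes are illustrative):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```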
1803 pub fn edit_for_indent_size_adjustment(
1804 row: u32,
1805 current_size: IndentSize,
1806 new_size: IndentSize,
1807 ) -> Option<(Range<Point>, String)> {
1808 if new_size.kind == current_size.kind {
1809             match new_size.len.cmp(&current_size.len) {
1810 Ordering::Greater => {
1811 let point = Point::new(row, 0);
1812 Some((
1813 point..point,
1814 iter::repeat(new_size.char())
1815 .take((new_size.len - current_size.len) as usize)
1816 .collect::<String>(),
1817 ))
1818 }
1819
1820 Ordering::Less => Some((
1821 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1822 String::new(),
1823 )),
1824
1825 Ordering::Equal => None,
1826 }
1827 } else {
1828 Some((
1829 Point::new(row, 0)..Point::new(row, current_size.len),
1830 iter::repeat(new_size.char())
1831 .take(new_size.len as usize)
1832 .collect::<String>(),
1833 ))
1834 }
1835 }
1836
1837 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1838 /// and the given new text.
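    ///
    /// A usage sketch (not a doctest; assumes `new_text: String` and `cx: &App` are in scope):
    ///
    /// ```ignore
    /// // Kick off the diff on the background executor; await the task elsewhere and
    /// // then pass the resulting `Diff` to `apply_diff`.
    /// let diff_task = buffer.diff(new_text, cx);
    /// ```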
1839 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1840 let old_text = self.as_rope().clone();
1841 let base_version = self.version();
1842 cx.background_executor()
1843 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1844 let old_text = old_text.to_string();
1845 let line_ending = LineEnding::detect(&new_text);
1846 LineEnding::normalize(&mut new_text);
1847 let edits = text_diff(&old_text, &new_text);
1848 Diff {
1849 base_version,
1850 line_ending,
1851 edits,
1852 }
1853 })
1854 }
1855
1856 /// Spawns a background task that searches the buffer for any whitespace
1857     /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1858 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1859 let old_text = self.as_rope().clone();
1860 let line_ending = self.line_ending();
1861 let base_version = self.version();
1862 cx.background_spawn(async move {
1863 let ranges = trailing_whitespace_ranges(&old_text);
1864 let empty = Arc::<str>::from("");
1865 Diff {
1866 base_version,
1867 line_ending,
1868 edits: ranges
1869 .into_iter()
1870 .map(|range| (range, empty.clone()))
1871 .collect(),
1872 }
1873 })
1874 }
1875
1876 /// Ensures that the buffer ends with a single newline character, and
1877 /// no other whitespace. Skips if the buffer is empty.
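    ///
    /// Illustrative effect (not a doctest; assumes a buffer update with `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// // Before: "fn main() {}\n\n  "
    /// buffer.ensure_final_newline(cx);
    /// // After:  "fn main() {}\n"
    /// ```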
1878 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1879 let len = self.len();
1880 if len == 0 {
1881 return;
1882 }
1883 let mut offset = len;
1884 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1885 let non_whitespace_len = chunk
1886 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1887 .len();
1888 offset -= chunk.len();
1889 offset += non_whitespace_len;
1890 if non_whitespace_len != 0 {
1891 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1892 return;
1893 }
1894 break;
1895 }
1896 }
1897 self.edit([(offset..len, "\n")], None, cx);
1898 }
1899
1900 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1901     /// calculated, the diff is adjusted to account for those changes, and any parts
1902     /// of the diff that conflict with those changes are discarded.
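    ///
    /// A usage sketch (not a doctest; assumes a `diff` previously produced by
    /// [`Buffer::diff`] or [`Buffer::remove_trailing_whitespace`], inside a buffer update):
    ///
    /// ```ignore
    /// if buffer.apply_diff(diff, cx).is_some() {
    ///     // The surviving edits were applied as a single transaction.
    ///     buffer.finalize_last_transaction();
    /// }
    /// ```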
1903 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1904 let snapshot = self.snapshot();
1905 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1906 let mut delta = 0;
1907 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1908 while let Some(edit_since) = edits_since.peek() {
1909 // If the edit occurs after a diff hunk, then it does not
1910 // affect that hunk.
1911 if edit_since.old.start > range.end {
1912 break;
1913 }
1914 // If the edit precedes the diff hunk, then adjust the hunk
1915 // to reflect the edit.
1916 else if edit_since.old.end < range.start {
1917 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1918 edits_since.next();
1919 }
1920 // If the edit intersects a diff hunk, then discard that hunk.
1921 else {
1922 return None;
1923 }
1924 }
1925
1926 let start = (range.start as i64 + delta) as usize;
1927 let end = (range.end as i64 + delta) as usize;
1928 Some((start..end, new_text))
1929 });
1930
1931 self.start_transaction();
1932 self.text.set_line_ending(diff.line_ending);
1933 self.edit(adjusted_edits, None, cx);
1934 self.end_transaction(cx)
1935 }
1936
1937 fn has_unsaved_edits(&self) -> bool {
1938 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1939
1940 if last_version == self.version {
1941 self.has_unsaved_edits
1942 .set((last_version, has_unsaved_edits));
1943 return has_unsaved_edits;
1944 }
1945
1946 let has_edits = self.has_edits_since(&self.saved_version);
1947 self.has_unsaved_edits
1948 .set((self.version.clone(), has_edits));
1949 has_edits
1950 }
1951
1952 /// Checks if the buffer has unsaved changes.
1953 pub fn is_dirty(&self) -> bool {
1954 if self.capability == Capability::ReadOnly {
1955 return false;
1956 }
1957 if self.has_conflict {
1958 return true;
1959 }
1960 match self.file.as_ref().map(|f| f.disk_state()) {
1961 Some(DiskState::New) | Some(DiskState::Deleted) => {
1962 !self.is_empty() && self.has_unsaved_edits()
1963 }
1964 _ => self.has_unsaved_edits(),
1965 }
1966 }
1967
1968 /// Checks if the buffer and its file have both changed since the buffer
1969 /// was last saved or reloaded.
1970 pub fn has_conflict(&self) -> bool {
1971 if self.has_conflict {
1972 return true;
1973 }
1974 let Some(file) = self.file.as_ref() else {
1975 return false;
1976 };
1977 match file.disk_state() {
1978 DiskState::New => false,
1979 DiskState::Present { mtime } => match self.saved_mtime {
1980 Some(saved_mtime) => {
1981 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1982 }
1983 None => true,
1984 },
1985 DiskState::Deleted => false,
1986 }
1987 }
1988
1989 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1990 pub fn subscribe(&mut self) -> Subscription {
1991 self.text.subscribe()
1992 }
1993
1994 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1995 ///
1996 /// This allows downstream code to check if the buffer's text has changed without
1997     /// waiting for an effect cycle, which would be required if using events.
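    ///
    /// A usage sketch (not a doctest; the `changed` flag is illustrative):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...after some edits have been applied...
    /// if changed.replace(false) {
    ///     // React to the text change without waiting for an effect cycle.
    /// }
    /// ```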
1998 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1999 if let Err(ix) = self
2000 .change_bits
2001 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2002 {
2003 self.change_bits.insert(ix, bit);
2004 }
2005 }
2006
2007 fn was_changed(&mut self) {
2008 self.change_bits.retain(|change_bit| {
2009 change_bit.upgrade().map_or(false, |bit| {
2010 bit.replace(true);
2011 true
2012 })
2013 });
2014 }
2015
2016 /// Starts a transaction, if one is not already in-progress. When undoing or
2017 /// redoing edits, all of the edits performed within a transaction are undone
2018 /// or redone together.
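    ///
    /// A usage sketch (not a doctest; assumes a buffer update context, with illustrative
    /// ranges and text):
    ///
    /// ```ignore
    /// // Both edits will be undone or redone as a single step.
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n// footer\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// ```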
2019 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2020 self.start_transaction_at(Instant::now())
2021 }
2022
2023 /// Starts a transaction, providing the current time. Subsequent transactions
2024 /// that occur within a short period of time will be grouped together. This
2025 /// is controlled by the buffer's undo grouping duration.
2026 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2027 self.transaction_depth += 1;
2028 if self.was_dirty_before_starting_transaction.is_none() {
2029 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2030 }
2031 self.text.start_transaction_at(now)
2032 }
2033
2034 /// Terminates the current transaction, if this is the outermost transaction.
2035 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2036 self.end_transaction_at(Instant::now(), cx)
2037 }
2038
2039 /// Terminates the current transaction, providing the current time. Subsequent transactions
2040 /// that occur within a short period of time will be grouped together. This
2041 /// is controlled by the buffer's undo grouping duration.
2042 pub fn end_transaction_at(
2043 &mut self,
2044 now: Instant,
2045 cx: &mut Context<Self>,
2046 ) -> Option<TransactionId> {
2047 assert!(self.transaction_depth > 0);
2048 self.transaction_depth -= 1;
2049 let was_dirty = if self.transaction_depth == 0 {
2050 self.was_dirty_before_starting_transaction.take().unwrap()
2051 } else {
2052 false
2053 };
2054 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2055 self.did_edit(&start_version, was_dirty, cx);
2056 Some(transaction_id)
2057 } else {
2058 None
2059 }
2060 }
2061
2062 /// Manually add a transaction to the buffer's undo history.
2063 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2064 self.text.push_transaction(transaction, now);
2065 }
2066
2067 /// Prevent the last transaction from being grouped with any subsequent transactions,
2068     /// even if they occur within the buffer's undo grouping duration.
2069 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2070 self.text.finalize_last_transaction()
2071 }
2072
2073 /// Manually group all changes since a given transaction.
2074 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2075 self.text.group_until_transaction(transaction_id);
2076 }
2077
2078     /// Manually remove a transaction from the buffer's undo history.
2079 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2080 self.text.forget_transaction(transaction_id)
2081 }
2082
2083     /// Retrieve a transaction from the buffer's undo history.
2084 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2085 self.text.get_transaction(transaction_id)
2086 }
2087
2088 /// Manually merge two transactions in the buffer's undo history.
2089 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2090 self.text.merge_transactions(transaction, destination);
2091 }
2092
2093 /// Waits for the buffer to receive operations with the given timestamps.
2094 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2095 &mut self,
2096 edit_ids: It,
2097 ) -> impl Future<Output = Result<()>> + use<It> {
2098 self.text.wait_for_edits(edit_ids)
2099 }
2100
2101 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2102 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2103 &mut self,
2104 anchors: It,
2105 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2106 self.text.wait_for_anchors(anchors)
2107 }
2108
2109 /// Waits for the buffer to receive operations up to the given version.
2110 pub fn wait_for_version(
2111 &mut self,
2112 version: clock::Global,
2113 ) -> impl Future<Output = Result<()>> + use<> {
2114 self.text.wait_for_version(version)
2115 }
2116
2117     /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2118     /// [`Buffer::wait_for_anchors`] to resolve with an error.
2119 pub fn give_up_waiting(&mut self) {
2120 self.text.give_up_waiting();
2121 }
2122
2123     /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2124 pub fn set_active_selections(
2125 &mut self,
2126 selections: Arc<[Selection<Anchor>]>,
2127 line_mode: bool,
2128 cursor_shape: CursorShape,
2129 cx: &mut Context<Self>,
2130 ) {
2131 let lamport_timestamp = self.text.lamport_clock.tick();
2132 self.remote_selections.insert(
2133 self.text.replica_id(),
2134 SelectionSet {
2135 selections: selections.clone(),
2136 lamport_timestamp,
2137 line_mode,
2138 cursor_shape,
2139 },
2140 );
2141 self.send_operation(
2142 Operation::UpdateSelections {
2143 selections,
2144 line_mode,
2145 lamport_timestamp,
2146 cursor_shape,
2147 },
2148 true,
2149 cx,
2150 );
2151 self.non_text_state_update_count += 1;
2152 cx.notify();
2153 }
2154
2155 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2156 /// this replica.
2157 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2158 if self
2159 .remote_selections
2160 .get(&self.text.replica_id())
2161 .map_or(true, |set| !set.selections.is_empty())
2162 {
2163 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2164 }
2165 }
2166
2167 pub fn set_agent_selections(
2168 &mut self,
2169 selections: Arc<[Selection<Anchor>]>,
2170 line_mode: bool,
2171 cursor_shape: CursorShape,
2172 cx: &mut Context<Self>,
2173 ) {
2174 let lamport_timestamp = self.text.lamport_clock.tick();
2175 self.remote_selections.insert(
2176 AGENT_REPLICA_ID,
2177 SelectionSet {
2178 selections: selections.clone(),
2179 lamport_timestamp,
2180 line_mode,
2181 cursor_shape,
2182 },
2183 );
2184 self.non_text_state_update_count += 1;
2185 cx.notify();
2186 }
2187
2188 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2189 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2190 }
2191
2192 /// Replaces the buffer's entire text.
2193 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2194 where
2195 T: Into<Arc<str>>,
2196 {
2197 self.autoindent_requests.clear();
2198 self.edit([(0..self.len(), text)], None, cx)
2199 }
2200
2201 /// Appends the given text to the end of the buffer.
2202 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2203 where
2204 T: Into<Arc<str>>,
2205 {
2206 self.edit([(self.len()..self.len(), text)], None, cx)
2207 }
2208
2209 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2210 /// delete, and a string of text to insert at that location.
2211 ///
2212 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2213 /// request for the edited ranges, which will be processed when the buffer finishes
2214 /// parsing.
2215 ///
2216     /// Parsing takes place at the end of a transaction, and may be performed
2217     /// synchronously or asynchronously, depending on the changes.
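    ///
    /// A usage sketch (not a doctest; assumes a buffer update context, with an
    /// illustrative range and text):
    ///
    /// ```ignore
    /// // Replace the first four bytes and auto-indent every edited line.
    /// buffer.edit(
    ///     [(0..4, "if condition {\n    body\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```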
2218 pub fn edit<I, S, T>(
2219 &mut self,
2220 edits_iter: I,
2221 autoindent_mode: Option<AutoindentMode>,
2222 cx: &mut Context<Self>,
2223 ) -> Option<clock::Lamport>
2224 where
2225 I: IntoIterator<Item = (Range<S>, T)>,
2226 S: ToOffset,
2227 T: Into<Arc<str>>,
2228 {
2229 // Skip invalid edits and coalesce contiguous ones.
2230 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2231
2232 for (range, new_text) in edits_iter {
2233 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2234
2235 if range.start > range.end {
2236 mem::swap(&mut range.start, &mut range.end);
2237 }
2238 let new_text = new_text.into();
2239 if !new_text.is_empty() || !range.is_empty() {
2240 if let Some((prev_range, prev_text)) = edits.last_mut() {
2241 if prev_range.end >= range.start {
2242 prev_range.end = cmp::max(prev_range.end, range.end);
2243 *prev_text = format!("{prev_text}{new_text}").into();
2244 } else {
2245 edits.push((range, new_text));
2246 }
2247 } else {
2248 edits.push((range, new_text));
2249 }
2250 }
2251 }
2252 if edits.is_empty() {
2253 return None;
2254 }
2255
2256 self.start_transaction();
2257 self.pending_autoindent.take();
2258 let autoindent_request = autoindent_mode
2259 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2260
2261 let edit_operation = self.text.edit(edits.iter().cloned());
2262 let edit_id = edit_operation.timestamp();
2263
2264 if let Some((before_edit, mode)) = autoindent_request {
2265 let mut delta = 0isize;
2266 let entries = edits
2267 .into_iter()
2268 .enumerate()
2269 .zip(&edit_operation.as_edit().unwrap().new_text)
2270 .map(|((ix, (range, _)), new_text)| {
2271 let new_text_length = new_text.len();
2272 let old_start = range.start.to_point(&before_edit);
2273 let new_start = (delta + range.start as isize) as usize;
2274 let range_len = range.end - range.start;
2275 delta += new_text_length as isize - range_len as isize;
2276
2277 // Decide what range of the insertion to auto-indent, and whether
2278 // the first line of the insertion should be considered a newly-inserted line
2279 // or an edit to an existing line.
2280 let mut range_of_insertion_to_indent = 0..new_text_length;
2281 let mut first_line_is_new = true;
2282
2283 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2284 let old_line_end = before_edit.line_len(old_start.row);
2285
2286 if old_start.column > old_line_start {
2287 first_line_is_new = false;
2288 }
2289
2290 if !new_text.contains('\n')
2291 && (old_start.column + (range_len as u32) < old_line_end
2292 || old_line_end == old_line_start)
2293 {
2294 first_line_is_new = false;
2295 }
2296
2297 // When inserting text starting with a newline, avoid auto-indenting the
2298 // previous line.
2299 if new_text.starts_with('\n') {
2300 range_of_insertion_to_indent.start += 1;
2301 first_line_is_new = true;
2302 }
2303
2304 let mut original_indent_column = None;
2305 if let AutoindentMode::Block {
2306 original_indent_columns,
2307 } = &mode
2308 {
2309 original_indent_column = Some(if new_text.starts_with('\n') {
2310 indent_size_for_text(
2311 new_text[range_of_insertion_to_indent.clone()].chars(),
2312 )
2313 .len
2314 } else {
2315 original_indent_columns
2316 .get(ix)
2317 .copied()
2318 .flatten()
2319 .unwrap_or_else(|| {
2320 indent_size_for_text(
2321 new_text[range_of_insertion_to_indent.clone()].chars(),
2322 )
2323 .len
2324 })
2325 });
2326
2327 // Avoid auto-indenting the line after the edit.
2328 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2329 range_of_insertion_to_indent.end -= 1;
2330 }
2331 }
2332
2333 AutoindentRequestEntry {
2334 first_line_is_new,
2335 original_indent_column,
2336 indent_size: before_edit.language_indent_size_at(range.start, cx),
2337 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2338 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2339 }
2340 })
2341 .collect();
2342
2343 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2344 before_edit,
2345 entries,
2346 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2347 ignore_empty_lines: false,
2348 }));
2349 }
2350
2351 self.end_transaction(cx);
2352 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2353 Some(edit_id)
2354 }
2355
2356 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2357 self.was_changed();
2358
2359 if self.edits_since::<usize>(old_version).next().is_none() {
2360 return;
2361 }
2362
2363 self.reparse(cx);
2364 cx.emit(BufferEvent::Edited);
2365 if was_dirty != self.is_dirty() {
2366 cx.emit(BufferEvent::DirtyChanged);
2367 }
2368 cx.notify();
2369 }
2370
2371 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2372 where
2373 I: IntoIterator<Item = Range<T>>,
2374 T: ToOffset + Copy,
2375 {
2376 let before_edit = self.snapshot();
2377 let entries = ranges
2378 .into_iter()
2379 .map(|range| AutoindentRequestEntry {
2380 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2381 first_line_is_new: true,
2382 indent_size: before_edit.language_indent_size_at(range.start, cx),
2383 original_indent_column: None,
2384 })
2385 .collect();
2386 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2387 before_edit,
2388 entries,
2389 is_block_mode: false,
2390 ignore_empty_lines: true,
2391 }));
2392 self.request_autoindent(cx);
2393 }
2394
2395     /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2396     /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2397 pub fn insert_empty_line(
2398 &mut self,
2399 position: impl ToPoint,
2400 space_above: bool,
2401 space_below: bool,
2402 cx: &mut Context<Self>,
2403 ) -> Point {
2404 let mut position = position.to_point(self);
2405
2406 self.start_transaction();
2407
2408 self.edit(
2409 [(position..position, "\n")],
2410 Some(AutoindentMode::EachLine),
2411 cx,
2412 );
2413
2414 if position.column > 0 {
2415 position += Point::new(1, 0);
2416 }
2417
2418 if !self.is_line_blank(position.row) {
2419 self.edit(
2420 [(position..position, "\n")],
2421 Some(AutoindentMode::EachLine),
2422 cx,
2423 );
2424 }
2425
2426 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2427 self.edit(
2428 [(position..position, "\n")],
2429 Some(AutoindentMode::EachLine),
2430 cx,
2431 );
2432 position.row += 1;
2433 }
2434
2435 if space_below
2436 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2437 {
2438 self.edit(
2439 [(position..position, "\n")],
2440 Some(AutoindentMode::EachLine),
2441 cx,
2442 );
2443 }
2444
2445 self.end_transaction(cx);
2446
2447 position
2448 }
2449
2450 /// Applies the given remote operations to the buffer.
2451 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2452 self.pending_autoindent.take();
2453 let was_dirty = self.is_dirty();
2454 let old_version = self.version.clone();
2455 let mut deferred_ops = Vec::new();
2456 let buffer_ops = ops
2457 .into_iter()
2458 .filter_map(|op| match op {
2459 Operation::Buffer(op) => Some(op),
2460 _ => {
2461 if self.can_apply_op(&op) {
2462 self.apply_op(op, cx);
2463 } else {
2464 deferred_ops.push(op);
2465 }
2466 None
2467 }
2468 })
2469 .collect::<Vec<_>>();
2470 for operation in buffer_ops.iter() {
2471 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2472 }
2473 self.text.apply_ops(buffer_ops);
2474 self.deferred_ops.insert(deferred_ops);
2475 self.flush_deferred_ops(cx);
2476 self.did_edit(&old_version, was_dirty, cx);
2477         // Notify regardless of whether the buffer was edited, since the operations
2478         // could include a selection update.
2479 cx.notify();
2480 }
2481
2482 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2483 let mut deferred_ops = Vec::new();
2484 for op in self.deferred_ops.drain().iter().cloned() {
2485 if self.can_apply_op(&op) {
2486 self.apply_op(op, cx);
2487 } else {
2488 deferred_ops.push(op);
2489 }
2490 }
2491 self.deferred_ops.insert(deferred_ops);
2492 }
2493
2494 pub fn has_deferred_ops(&self) -> bool {
2495 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2496 }
2497
2498 fn can_apply_op(&self, operation: &Operation) -> bool {
2499 match operation {
2500 Operation::Buffer(_) => {
2501 unreachable!("buffer operations should never be applied at this layer")
2502 }
2503 Operation::UpdateDiagnostics {
2504 diagnostics: diagnostic_set,
2505 ..
2506 } => diagnostic_set.iter().all(|diagnostic| {
2507 self.text.can_resolve(&diagnostic.range.start)
2508 && self.text.can_resolve(&diagnostic.range.end)
2509 }),
2510 Operation::UpdateSelections { selections, .. } => selections
2511 .iter()
2512 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2513 Operation::UpdateCompletionTriggers { .. } => true,
2514 }
2515 }
2516
2517 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2518 match operation {
2519 Operation::Buffer(_) => {
2520 unreachable!("buffer operations should never be applied at this layer")
2521 }
2522 Operation::UpdateDiagnostics {
2523 server_id,
2524 diagnostics: diagnostic_set,
2525 lamport_timestamp,
2526 } => {
2527 let snapshot = self.snapshot();
2528 self.apply_diagnostic_update(
2529 server_id,
2530 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2531 lamport_timestamp,
2532 cx,
2533 );
2534 }
2535 Operation::UpdateSelections {
2536 selections,
2537 lamport_timestamp,
2538 line_mode,
2539 cursor_shape,
2540 } => {
2541 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2542 if set.lamport_timestamp > lamport_timestamp {
2543 return;
2544 }
2545 }
2546
2547 self.remote_selections.insert(
2548 lamport_timestamp.replica_id,
2549 SelectionSet {
2550 selections,
2551 lamport_timestamp,
2552 line_mode,
2553 cursor_shape,
2554 },
2555 );
2556 self.text.lamport_clock.observe(lamport_timestamp);
2557 self.non_text_state_update_count += 1;
2558 }
2559 Operation::UpdateCompletionTriggers {
2560 triggers,
2561 lamport_timestamp,
2562 server_id,
2563 } => {
2564 if triggers.is_empty() {
2565 self.completion_triggers_per_language_server
2566 .remove(&server_id);
2567 self.completion_triggers = self
2568 .completion_triggers_per_language_server
2569 .values()
2570 .flat_map(|triggers| triggers.into_iter().cloned())
2571 .collect();
2572 } else {
2573 self.completion_triggers_per_language_server
2574 .insert(server_id, triggers.iter().cloned().collect());
2575 self.completion_triggers.extend(triggers);
2576 }
2577 self.text.lamport_clock.observe(lamport_timestamp);
2578 }
2579 }
2580 }
2581
2582 fn apply_diagnostic_update(
2583 &mut self,
2584 server_id: LanguageServerId,
2585 diagnostics: DiagnosticSet,
2586 lamport_timestamp: clock::Lamport,
2587 cx: &mut Context<Self>,
2588 ) {
2589 if lamport_timestamp > self.diagnostics_timestamp {
2590 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2591 if diagnostics.is_empty() {
2592 if let Ok(ix) = ix {
2593 self.diagnostics.remove(ix);
2594 }
2595 } else {
2596 match ix {
2597 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2598 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2599 };
2600 }
2601 self.diagnostics_timestamp = lamport_timestamp;
2602 self.non_text_state_update_count += 1;
2603 self.text.lamport_clock.observe(lamport_timestamp);
2604 cx.notify();
2605 cx.emit(BufferEvent::DiagnosticsUpdated);
2606 }
2607 }
2608
2609 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2610 self.was_changed();
2611 cx.emit(BufferEvent::Operation {
2612 operation,
2613 is_local,
2614 });
2615 }
2616
2617 /// Removes the selections for a given peer.
2618 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2619 self.remote_selections.remove(&replica_id);
2620 cx.notify();
2621 }
2622
2623 /// Undoes the most recent transaction.
2624 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2625 let was_dirty = self.is_dirty();
2626 let old_version = self.version.clone();
2627
2628 if let Some((transaction_id, operation)) = self.text.undo() {
2629 self.send_operation(Operation::Buffer(operation), true, cx);
2630 self.did_edit(&old_version, was_dirty, cx);
2631 Some(transaction_id)
2632 } else {
2633 None
2634 }
2635 }
2636
2637 /// Manually undoes a specific transaction in the buffer's undo history.
2638 pub fn undo_transaction(
2639 &mut self,
2640 transaction_id: TransactionId,
2641 cx: &mut Context<Self>,
2642 ) -> bool {
2643 let was_dirty = self.is_dirty();
2644 let old_version = self.version.clone();
2645 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 self.did_edit(&old_version, was_dirty, cx);
2648 true
2649 } else {
2650 false
2651 }
2652 }
2653
2654 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2655 pub fn undo_to_transaction(
2656 &mut self,
2657 transaction_id: TransactionId,
2658 cx: &mut Context<Self>,
2659 ) -> bool {
2660 let was_dirty = self.is_dirty();
2661 let old_version = self.version.clone();
2662
2663 let operations = self.text.undo_to_transaction(transaction_id);
2664 let undone = !operations.is_empty();
2665 for operation in operations {
2666 self.send_operation(Operation::Buffer(operation), true, cx);
2667 }
2668 if undone {
2669 self.did_edit(&old_version, was_dirty, cx)
2670 }
2671 undone
2672 }
2673
2674 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2675 let was_dirty = self.is_dirty();
2676 let operation = self.text.undo_operations(counts);
2677 let old_version = self.version.clone();
2678 self.send_operation(Operation::Buffer(operation), true, cx);
2679 self.did_edit(&old_version, was_dirty, cx);
2680 }
2681
2682     /// Redoes the most recently undone transaction.
2683 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2684 let was_dirty = self.is_dirty();
2685 let old_version = self.version.clone();
2686
2687 if let Some((transaction_id, operation)) = self.text.redo() {
2688 self.send_operation(Operation::Buffer(operation), true, cx);
2689 self.did_edit(&old_version, was_dirty, cx);
2690 Some(transaction_id)
2691 } else {
2692 None
2693 }
2694 }
2695
2696     /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2697 pub fn redo_to_transaction(
2698 &mut self,
2699 transaction_id: TransactionId,
2700 cx: &mut Context<Self>,
2701 ) -> bool {
2702 let was_dirty = self.is_dirty();
2703 let old_version = self.version.clone();
2704
2705 let operations = self.text.redo_to_transaction(transaction_id);
2706 let redone = !operations.is_empty();
2707 for operation in operations {
2708 self.send_operation(Operation::Buffer(operation), true, cx);
2709 }
2710 if redone {
2711 self.did_edit(&old_version, was_dirty, cx)
2712 }
2713 redone
2714 }
2715
2716 /// Override current completion triggers with the user-provided completion triggers.
2717 pub fn set_completion_triggers(
2718 &mut self,
2719 server_id: LanguageServerId,
2720 triggers: BTreeSet<String>,
2721 cx: &mut Context<Self>,
2722 ) {
2723 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2724 if triggers.is_empty() {
2725 self.completion_triggers_per_language_server
2726 .remove(&server_id);
2727 self.completion_triggers = self
2728 .completion_triggers_per_language_server
2729 .values()
2730 .flat_map(|triggers| triggers.into_iter().cloned())
2731 .collect();
2732 } else {
2733 self.completion_triggers_per_language_server
2734 .insert(server_id, triggers.clone());
2735 self.completion_triggers.extend(triggers.iter().cloned());
2736 }
2737 self.send_operation(
2738 Operation::UpdateCompletionTriggers {
2739 triggers: triggers.into_iter().collect(),
2740 lamport_timestamp: self.completion_triggers_timestamp,
2741 server_id,
2742 },
2743 true,
2744 cx,
2745 );
2746 cx.notify();
2747 }
2748
2749     /// Returns the set of strings that trigger a completion menu for this language.
2750     /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2751 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2752 &self.completion_triggers
2753 }
2754
2755 /// Call this directly after performing edits to prevent the preview tab
2756 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2757 /// to return false until there are additional edits.
2758 pub fn refresh_preview(&mut self) {
2759 self.preview_version = self.version.clone();
2760 }
2761
2762 /// Whether we should preserve the preview status of a tab containing this buffer.
2763 pub fn preserve_preview(&self) -> bool {
2764 !self.has_edits_since(&self.preview_version)
2765 }
2766}
2767
2768#[doc(hidden)]
2769#[cfg(any(test, feature = "test-support"))]
2770impl Buffer {
2771 pub fn edit_via_marked_text(
2772 &mut self,
2773 marked_string: &str,
2774 autoindent_mode: Option<AutoindentMode>,
2775 cx: &mut Context<Self>,
2776 ) {
2777 let edits = self.edits_for_marked_text(marked_string);
2778 self.edit(edits, autoindent_mode, cx);
2779 }
2780
2781 pub fn set_group_interval(&mut self, group_interval: Duration) {
2782 self.text.set_group_interval(group_interval);
2783 }
2784
2785 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2786 where
2787 T: rand::Rng,
2788 {
2789 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2790 let mut last_end = None;
2791 for _ in 0..old_range_count {
2792 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2793 break;
2794 }
2795
2796 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2797 let mut range = self.random_byte_range(new_start, rng);
2798 if rng.gen_bool(0.2) {
2799 mem::swap(&mut range.start, &mut range.end);
2800 }
2801 last_end = Some(range.end);
2802
2803 let new_text_len = rng.gen_range(0..10);
2804 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2805 new_text = new_text.to_uppercase();
2806
2807 edits.push((range, new_text));
2808 }
2809 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2810 self.edit(edits, None, cx);
2811 }
2812
2813 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2814 let was_dirty = self.is_dirty();
2815 let old_version = self.version.clone();
2816
2817 let ops = self.text.randomly_undo_redo(rng);
2818 if !ops.is_empty() {
2819 for op in ops {
2820 self.send_operation(Operation::Buffer(op), true, cx);
2821 self.did_edit(&old_version, was_dirty, cx);
2822 }
2823 }
2824 }
2825}
2826
2827impl EventEmitter<BufferEvent> for Buffer {}
2828
2829impl Deref for Buffer {
2830 type Target = TextBuffer;
2831
2832 fn deref(&self) -> &Self::Target {
2833 &self.text
2834 }
2835}
2836
2837impl BufferSnapshot {
2838 /// Returns [`IndentSize`] for a given line that respects user settings and
2839 /// language preferences.
2840 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2841 indent_size_for_line(self, row)
2842 }
2843
2844 /// Returns [`IndentSize`] for a given position that respects user settings
2845 /// and language preferences.
2846 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2847 let settings = language_settings(
2848 self.language_at(position).map(|l| l.name()),
2849 self.file(),
2850 cx,
2851 );
2852 if settings.hard_tabs {
2853 IndentSize::tab()
2854 } else {
2855 IndentSize::spaces(settings.tab_size.get())
2856 }
2857 }
2858
2859 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2860 /// is passed in as `single_indent_size`.
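    ///
    /// A usage sketch (not a doctest; assumes `snapshot: &BufferSnapshot`, with
    /// illustrative rows and indent unit):
    ///
    /// ```ignore
    /// // Suggested indent sizes for rows 0 through 9, using a 4-space indent unit.
    /// let suggestions = snapshot.suggested_indents(0..10, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent {} columns", indent.len);
    /// }
    /// ```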
2861 pub fn suggested_indents(
2862 &self,
2863 rows: impl Iterator<Item = u32>,
2864 single_indent_size: IndentSize,
2865 ) -> BTreeMap<u32, IndentSize> {
2866 let mut result = BTreeMap::new();
2867
2868 for row_range in contiguous_ranges(rows, 10) {
2869 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2870 Some(suggestions) => suggestions,
2871 _ => break,
2872 };
2873
2874 for (row, suggestion) in row_range.zip(suggestions) {
2875 let indent_size = if let Some(suggestion) = suggestion {
2876 result
2877 .get(&suggestion.basis_row)
2878 .copied()
2879 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2880 .with_delta(suggestion.delta, single_indent_size)
2881 } else {
2882 self.indent_size_for_line(row)
2883 };
2884
2885 result.insert(row, indent_size);
2886 }
2887 }
2888
2889 result
2890 }
2891
2892 fn suggest_autoindents(
2893 &self,
2894 row_range: Range<u32>,
2895 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2896 let config = &self.language.as_ref()?.config;
2897 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2898 let significant_indentation = config.significant_indentation;
2899
2900 // Find the suggested indentation ranges based on the syntax tree.
2901 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2902 let end = Point::new(row_range.end, 0);
2903 let range = (start..end).to_offset(&self.text);
2904 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2905 Some(&grammar.indents_config.as_ref()?.query)
2906 });
2907 let indent_configs = matches
2908 .grammars()
2909 .iter()
2910 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2911 .collect::<Vec<_>>();
2912
2913 let mut indent_ranges = Vec::<Range<Point>>::new();
2914 let mut outdent_positions = Vec::<Point>::new();
2915 while let Some(mat) = matches.peek() {
2916 let mut start: Option<Point> = None;
2917 let mut end: Option<Point> = None;
2918 let mut outdent: Option<Point> = None;
2919
2920 let config = &indent_configs[mat.grammar_index];
2921 for capture in mat.captures {
2922 if capture.index == config.indent_capture_ix {
2923 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2924 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2925 } else if Some(capture.index) == config.start_capture_ix {
2926 start = Some(Point::from_ts_point(capture.node.end_position()));
2927 } else if Some(capture.index) == config.end_capture_ix {
2928 end = Some(Point::from_ts_point(capture.node.start_position()));
2929 } else if Some(capture.index) == config.outdent_capture_ix {
2930 let point = Point::from_ts_point(capture.node.start_position());
2931 outdent.get_or_insert(point);
2932 outdent_positions.push(point);
2933 }
2934 }
2935
2936 matches.advance();
2937             // In the case of significant indentation, expand the end to the outdent position.
2938 let end = if significant_indentation {
2939 outdent.or(end)
2940 } else {
2941 end
2942 };
2943 if let Some((start, end)) = start.zip(end) {
2944 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2945 continue;
2946 }
2947 let range = start..end;
2948 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2949 Err(ix) => indent_ranges.insert(ix, range),
2950 Ok(ix) => {
2951 let prev_range = &mut indent_ranges[ix];
2952 prev_range.end = prev_range.end.max(range.end);
2953 }
2954 }
2955 }
2956 }
2957
2958 let mut error_ranges = Vec::<Range<Point>>::new();
2959 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2960 grammar.error_query.as_ref()
2961 });
2962 while let Some(mat) = matches.peek() {
2963 let node = mat.captures[0].node;
2964 let start = Point::from_ts_point(node.start_position());
2965 let end = Point::from_ts_point(node.end_position());
2966 let range = start..end;
2967 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2968 Ok(ix) | Err(ix) => ix,
2969 };
2970 let mut end_ix = ix;
2971 while let Some(existing_range) = error_ranges.get(end_ix) {
2972 if existing_range.end < end {
2973 end_ix += 1;
2974 } else {
2975 break;
2976 }
2977 }
2978 error_ranges.splice(ix..end_ix, [range]);
2979 matches.advance();
2980 }
2981
2982         // We don't use outdent positions to truncate when indentation is significant;
2983         // rather, we use them to expand the indent ranges (handled above).
2984 if !significant_indentation {
2985 outdent_positions.sort();
2986 for outdent_position in outdent_positions {
2987                 // Find the innermost indent range containing this outdent position
2988                 // and set its end to the outdent position.
2989 if let Some(range_to_truncate) = indent_ranges
2990 .iter_mut()
2991 .filter(|indent_range| indent_range.contains(&outdent_position))
2992 .next_back()
2993 {
2994 range_to_truncate.end = outdent_position;
2995 }
2996 }
2997 }
2998
2999         // Find the suggested indentation increases and decreases based on regexes.
3000 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3001 self.for_each_line(
3002 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3003 ..Point::new(row_range.end, 0),
3004 |row, line| {
3005 if config
3006 .decrease_indent_pattern
3007 .as_ref()
3008 .map_or(false, |regex| regex.is_match(line))
3009 {
3010 indent_change_rows.push((row, Ordering::Less));
3011 }
3012 if config
3013 .increase_indent_pattern
3014 .as_ref()
3015 .map_or(false, |regex| regex.is_match(line))
3016 {
3017 indent_change_rows.push((row + 1, Ordering::Greater));
3018 }
3019 },
3020 );
3021
3022 let mut indent_changes = indent_change_rows.into_iter().peekable();
3023 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3024 prev_non_blank_row.unwrap_or(0)
3025 } else {
3026 row_range.start.saturating_sub(1)
3027 };
3028 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3029 Some(row_range.map(move |row| {
3030 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3031
3032 let mut indent_from_prev_row = false;
3033 let mut outdent_from_prev_row = false;
3034 let mut outdent_to_row = u32::MAX;
3035 let mut from_regex = false;
3036
3037 while let Some((indent_row, delta)) = indent_changes.peek() {
3038 match indent_row.cmp(&row) {
3039 Ordering::Equal => match delta {
3040 Ordering::Less => {
3041 from_regex = true;
3042 outdent_from_prev_row = true
3043 }
3044 Ordering::Greater => {
3045 indent_from_prev_row = true;
3046 from_regex = true
3047 }
3048 _ => {}
3049 },
3050
3051 Ordering::Greater => break,
3052 Ordering::Less => {}
3053 }
3054
3055 indent_changes.next();
3056 }
3057
3058 for range in &indent_ranges {
3059 if range.start.row >= row {
3060 break;
3061 }
3062 if range.start.row == prev_row && range.end > row_start {
3063 indent_from_prev_row = true;
3064 }
3065 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3066 {
3067 indent_from_prev_row = true;
3068 }
3069 if !significant_indentation || !self.is_line_blank(row) {
3070 if range.end > prev_row_start && range.end <= row_start {
3071 outdent_to_row = outdent_to_row.min(range.start.row);
3072 }
3073 }
3074 }
3075
3076 let within_error = error_ranges
3077 .iter()
3078 .any(|e| e.start.row < row && e.end > row_start);
3079
3080 let suggestion = if outdent_to_row == prev_row
3081 || (outdent_from_prev_row && indent_from_prev_row)
3082 {
3083 Some(IndentSuggestion {
3084 basis_row: prev_row,
3085 delta: Ordering::Equal,
3086 within_error: within_error && !from_regex,
3087 })
3088 } else if indent_from_prev_row {
3089 Some(IndentSuggestion {
3090 basis_row: prev_row,
3091 delta: Ordering::Greater,
3092 within_error: within_error && !from_regex,
3093 })
3094 } else if outdent_to_row < prev_row {
3095 Some(IndentSuggestion {
3096 basis_row: outdent_to_row,
3097 delta: Ordering::Equal,
3098 within_error: within_error && !from_regex,
3099 })
3100 } else if outdent_from_prev_row {
3101 Some(IndentSuggestion {
3102 basis_row: prev_row,
3103 delta: Ordering::Less,
3104 within_error: within_error && !from_regex,
3105 })
3106 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3107 {
3108 Some(IndentSuggestion {
3109 basis_row: prev_row,
3110 delta: Ordering::Equal,
3111 within_error: within_error && !from_regex,
3112 })
3113 } else {
3114 None
3115 };
3116
3117 prev_row = row;
3118 prev_row_start = row_start;
3119 suggestion
3120 }))
3121 }
3122
3123 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3124 while row > 0 {
3125 row -= 1;
3126 if !self.is_line_blank(row) {
3127 return Some(row);
3128 }
3129 }
3130 None
3131 }
3132
3133 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3134 let captures = self.syntax.captures(range, &self.text, |grammar| {
3135 grammar.highlights_query.as_ref()
3136 });
3137 let highlight_maps = captures
3138 .grammars()
3139 .iter()
3140 .map(|grammar| grammar.highlight_map())
3141 .collect();
3142 (captures, highlight_maps)
3143 }
3144
3145 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3146 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3147 /// returned in chunks where each chunk has a single syntax highlighting style and
3148 /// diagnostic status.
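    ///
    /// A usage sketch (not a doctest; assumes `snapshot: &BufferSnapshot`, with an
    /// illustrative range):
    ///
    /// ```ignore
    /// // Reassemble the buffer text from language-aware chunks.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```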
3149 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3150 let range = range.start.to_offset(self)..range.end.to_offset(self);
3151
3152 let mut syntax = None;
3153 if language_aware {
3154 syntax = Some(self.get_highlights(range.clone()));
3155 }
3156 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3157 let diagnostics = language_aware;
3158 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3159 }
3160
3161 pub fn highlighted_text_for_range<T: ToOffset>(
3162 &self,
3163 range: Range<T>,
3164 override_style: Option<HighlightStyle>,
3165 syntax_theme: &SyntaxTheme,
3166 ) -> HighlightedText {
3167 HighlightedText::from_buffer_range(
3168 range,
3169 &self.text,
3170 &self.syntax,
3171 override_style,
3172 syntax_theme,
3173 )
3174 }
3175
3176 /// Invokes the given callback for each line of text in the given range of the buffer.
3177     /// Uses a callback to avoid allocating a new string for each line.
3178 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3179 let mut line = String::new();
3180 let mut row = range.start.row;
3181 for chunk in self
3182 .as_rope()
3183 .chunks_in_range(range.to_offset(self))
3184 .chain(["\n"])
3185 {
3186 for (newline_ix, text) in chunk.split('\n').enumerate() {
3187 if newline_ix > 0 {
3188 callback(row, &line);
3189 row += 1;
3190 line.clear();
3191 }
3192 line.push_str(text);
3193 }
3194 }
3195 }
3196
3197 /// Iterates over every [`SyntaxLayer`] in the buffer.
3198 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3199 self.syntax
3200 .layers_for_range(0..self.len(), &self.text, true)
3201 }
3202
3203 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3204 let offset = position.to_offset(self);
3205 self.syntax
3206 .layers_for_range(offset..offset, &self.text, false)
3207 .filter(|l| l.node().end_byte() > offset)
3208 .last()
3209 }
3210
3211 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3212 &self,
3213 range: Range<D>,
3214 ) -> Option<SyntaxLayer<'_>> {
3215 let range = range.to_offset(self);
3216 return self
3217 .syntax
3218 .layers_for_range(range, &self.text, false)
3219 .max_by(|a, b| {
3220 if a.depth != b.depth {
3221 a.depth.cmp(&b.depth)
3222 } else if a.offset.0 != b.offset.0 {
3223 a.offset.0.cmp(&b.offset.0)
3224 } else {
3225 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3226 }
3227 });
3228 }
3229
3230 /// Returns the main [`Language`].
3231 pub fn language(&self) -> Option<&Arc<Language>> {
3232 self.language.as_ref()
3233 }
3234
3235 /// Returns the [`Language`] at the given location.
3236 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3237 self.syntax_layer_at(position)
3238 .map(|info| info.language)
3239 .or(self.language.as_ref())
3240 }
3241
3242 /// Returns the settings for the language at the given location.
3243 pub fn settings_at<'a, D: ToOffset>(
3244 &'a self,
3245 position: D,
3246 cx: &'a App,
3247 ) -> Cow<'a, LanguageSettings> {
3248 language_settings(
3249 self.language_at(position).map(|l| l.name()),
3250 self.file.as_ref(),
3251 cx,
3252 )
3253 }
3254
3255 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3256 CharClassifier::new(self.language_scope_at(point))
3257 }
3258
3259 /// Returns the [`LanguageScope`] at the given location.
3260 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3261 let offset = position.to_offset(self);
3262 let mut scope = None;
3263 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3264
3265 // Use the layer that has the smallest node intersecting the given point.
3266 for layer in self
3267 .syntax
3268 .layers_for_range(offset..offset, &self.text, false)
3269 {
3270 let mut cursor = layer.node().walk();
3271
3272 let mut range = None;
3273 loop {
3274 let child_range = cursor.node().byte_range();
3275 if !child_range.contains(&offset) {
3276 break;
3277 }
3278
3279 range = Some(child_range);
3280 if cursor.goto_first_child_for_byte(offset).is_none() {
3281 break;
3282 }
3283 }
3284
3285 if let Some(range) = range {
3286 if smallest_range_and_depth.as_ref().map_or(
3287 true,
3288 |(smallest_range, smallest_range_depth)| {
3289 if layer.depth > *smallest_range_depth {
3290 true
3291 } else if layer.depth == *smallest_range_depth {
3292 range.len() < smallest_range.len()
3293 } else {
3294 false
3295 }
3296 },
3297 ) {
3298 smallest_range_and_depth = Some((range, layer.depth));
3299 scope = Some(LanguageScope {
3300 language: layer.language.clone(),
3301 override_id: layer.override_id(offset, &self.text),
3302 });
3303 }
3304 }
3305 }
3306
3307 scope.or_else(|| {
3308 self.language.clone().map(|language| LanguageScope {
3309 language,
3310 override_id: None,
3311 })
3312 })
3313 }
3314
3315 /// Returns a tuple of the range and character kind of the word
3316 /// surrounding the given position.
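    ///
    /// A usage sketch (not a doctest; assumes `snapshot: &BufferSnapshot` and an
    /// illustrative offset):
    ///
    /// ```ignore
    /// // With buffer text "hello world" and offset 2, this yields 0..5 and a word kind.
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// ```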
3317 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3318 let mut start = start.to_offset(self);
3319 let mut end = start;
3320 let mut next_chars = self.chars_at(start).take(128).peekable();
3321 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3322
3323 let classifier = self.char_classifier_at(start);
3324 let word_kind = cmp::max(
3325 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3326 next_chars.peek().copied().map(|c| classifier.kind(c)),
3327 );
3328
3329 for ch in prev_chars {
3330 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3331 start -= ch.len_utf8();
3332 } else {
3333 break;
3334 }
3335 }
3336
3337 for ch in next_chars {
3338 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3339 end += ch.len_utf8();
3340 } else {
3341 break;
3342 }
3343 }
3344
3345 (start..end, word_kind)
3346 }
3347
3348 /// Returns the closest syntax node enclosing the given range.
3349 pub fn syntax_ancestor<'a, T: ToOffset>(
3350 &'a self,
3351 range: Range<T>,
3352 ) -> Option<tree_sitter::Node<'a>> {
3353 let range = range.start.to_offset(self)..range.end.to_offset(self);
3354 let mut result: Option<tree_sitter::Node<'a>> = None;
3355 'outer: for layer in self
3356 .syntax
3357 .layers_for_range(range.clone(), &self.text, true)
3358 {
3359 let mut cursor = layer.node().walk();
3360
3361 // Descend to the first leaf that touches the start of the range.
3362 //
3363 // If the range is non-empty and the current node ends exactly at the start,
3364 // move to the next sibling to find a node that extends beyond the start.
3365 //
3366 // If the range is empty and the current node starts after the range position,
3367 // move to the previous sibling to find the node that contains the position.
3368 while cursor.goto_first_child_for_byte(range.start).is_some() {
3369 if !range.is_empty() && cursor.node().end_byte() == range.start {
3370 cursor.goto_next_sibling();
3371 }
3372 if range.is_empty() && cursor.node().start_byte() > range.start {
3373 cursor.goto_previous_sibling();
3374 }
3375 }
3376
3377 // Ascend to the smallest ancestor that strictly contains the range.
3378 loop {
3379 let node_range = cursor.node().byte_range();
3380 if node_range.start <= range.start
3381 && node_range.end >= range.end
3382 && node_range.len() > range.len()
3383 {
3384 break;
3385 }
3386 if !cursor.goto_parent() {
3387 continue 'outer;
3388 }
3389 }
3390
3391 let left_node = cursor.node();
3392 let mut layer_result = left_node;
3393
3394 // For an empty range, try to find another node immediately to the right of the range.
3395 if left_node.end_byte() == range.start {
3396 let mut right_node = None;
3397 while !cursor.goto_next_sibling() {
3398 if !cursor.goto_parent() {
3399 break;
3400 }
3401 }
3402
3403 while cursor.node().start_byte() == range.start {
3404 right_node = Some(cursor.node());
3405 if !cursor.goto_first_child() {
3406 break;
3407 }
3408 }
3409
3410 // If there is a candidate node on both sides of the (empty) range, then
3411 // decide between the two by favoring a named node over an anonymous token.
3412 // If both nodes are the same in that regard, favor the right one.
3413 if let Some(right_node) = right_node {
3414 if right_node.is_named() || !left_node.is_named() {
3415 layer_result = right_node;
3416 }
3417 }
3418 }
3419
3420 if let Some(previous_result) = &result {
3421 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3422 continue;
3423 }
3424 }
3425 result = Some(layer_result);
3426 }
3427
3428 result
3429 }
3430
3431     /// Returns the root syntax node within the given row.
3432 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3433 let start_offset = position.to_offset(self);
3434
3435 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3436
3437 let layer = self
3438 .syntax
3439 .layers_for_range(start_offset..start_offset, &self.text, true)
3440 .next()?;
3441
3442 let mut cursor = layer.node().walk();
3443
3444 // Descend to the first leaf that touches the start of the range.
3445 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3446 if cursor.node().end_byte() == start_offset {
3447 cursor.goto_next_sibling();
3448 }
3449 }
3450
3451 // Ascend to the root node within the same row.
3452 while cursor.goto_parent() {
3453 if cursor.node().start_position().row != row {
3454 break;
3455 }
3456 }
3457
3458 return Some(cursor.node());
3459 }
3460
3461 /// Returns the outline for the buffer.
3462 ///
3463 /// This method allows passing an optional [`SyntaxTheme`] to
3464 /// syntax-highlight the returned symbols.
3465 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3466 self.outline_items_containing(0..self.len(), true, theme)
3467 .map(Outline::new)
3468 }
3469
3470 /// Returns all the symbols that contain the given position.
3471 ///
3472 /// This method allows passing an optional [`SyntaxTheme`] to
3473 /// syntax-highlight the returned symbols.
3474 pub fn symbols_containing<T: ToOffset>(
3475 &self,
3476 position: T,
3477 theme: Option<&SyntaxTheme>,
3478 ) -> Option<Vec<OutlineItem<Anchor>>> {
3479 let position = position.to_offset(self);
3480 let mut items = self.outline_items_containing(
3481 position.saturating_sub(1)..self.len().min(position + 1),
3482 false,
3483 theme,
3484 )?;
3485 let mut prev_depth = None;
3486 items.retain(|item| {
3487 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3488 prev_depth = Some(item.depth);
3489 result
3490 });
3491 Some(items)
3492 }
3493
3494 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3495 let range = range.to_offset(self);
3496 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3497 grammar.outline_config.as_ref().map(|c| &c.query)
3498 });
3499 let configs = matches
3500 .grammars()
3501 .iter()
3502 .map(|g| g.outline_config.as_ref().unwrap())
3503 .collect::<Vec<_>>();
3504
3505 while let Some(mat) = matches.peek() {
3506 let config = &configs[mat.grammar_index];
3507 let containing_item_node = maybe!({
3508 let item_node = mat.captures.iter().find_map(|cap| {
3509 if cap.index == config.item_capture_ix {
3510 Some(cap.node)
3511 } else {
3512 None
3513 }
3514 })?;
3515
3516 let item_byte_range = item_node.byte_range();
3517 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3518 None
3519 } else {
3520 Some(item_node)
3521 }
3522 });
3523
3524 if let Some(item_node) = containing_item_node {
3525 return Some(
3526 Point::from_ts_point(item_node.start_position())
3527 ..Point::from_ts_point(item_node.end_position()),
3528 );
3529 }
3530
3531 matches.advance();
3532 }
3533 None
3534 }
3535
3536 pub fn outline_items_containing<T: ToOffset>(
3537 &self,
3538 range: Range<T>,
3539 include_extra_context: bool,
3540 theme: Option<&SyntaxTheme>,
3541 ) -> Option<Vec<OutlineItem<Anchor>>> {
3542 let range = range.to_offset(self);
3543 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3544 grammar.outline_config.as_ref().map(|c| &c.query)
3545 });
3546 let configs = matches
3547 .grammars()
3548 .iter()
3549 .map(|g| g.outline_config.as_ref().unwrap())
3550 .collect::<Vec<_>>();
3551
3552 let mut items = Vec::new();
3553 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3554 while let Some(mat) = matches.peek() {
3555 let config = &configs[mat.grammar_index];
3556 if let Some(item) =
3557 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3558 {
3559 items.push(item);
3560 } else if let Some(capture) = mat
3561 .captures
3562 .iter()
3563 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3564 {
3565 let capture_range = capture.node.start_position()..capture.node.end_position();
3566 let mut capture_row_range =
3567 capture_range.start.row as u32..capture_range.end.row as u32;
3568 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3569 {
3570 capture_row_range.end -= 1;
3571 }
3572 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3573 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3574 last_row_range.end = capture_row_range.end;
3575 } else {
3576 annotation_row_ranges.push(capture_row_range);
3577 }
3578 } else {
3579 annotation_row_ranges.push(capture_row_range);
3580 }
3581 }
3582 matches.advance();
3583 }
3584
3585 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3586
3587 // Assign depths based on containment relationships and convert to anchors.
3588 let mut item_ends_stack = Vec::<Point>::new();
3589 let mut anchor_items = Vec::new();
3590 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3591 for item in items {
3592 while let Some(last_end) = item_ends_stack.last().copied() {
3593 if last_end < item.range.end {
3594 item_ends_stack.pop();
3595 } else {
3596 break;
3597 }
3598 }
3599
3600 let mut annotation_row_range = None;
3601 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3602 let row_preceding_item = item.range.start.row.saturating_sub(1);
3603 if next_annotation_row_range.end < row_preceding_item {
3604 annotation_row_ranges.next();
3605 } else {
3606 if next_annotation_row_range.end == row_preceding_item {
3607 annotation_row_range = Some(next_annotation_row_range.clone());
3608 annotation_row_ranges.next();
3609 }
3610 break;
3611 }
3612 }
3613
3614 anchor_items.push(OutlineItem {
3615 depth: item_ends_stack.len(),
3616 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3617 text: item.text,
3618 highlight_ranges: item.highlight_ranges,
3619 name_ranges: item.name_ranges,
3620 body_range: item.body_range.map(|body_range| {
3621 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3622 }),
3623 annotation_range: annotation_row_range.map(|annotation_range| {
3624 self.anchor_after(Point::new(annotation_range.start, 0))
3625 ..self.anchor_before(Point::new(
3626 annotation_range.end,
3627 self.line_len(annotation_range.end),
3628 ))
3629 }),
3630 });
3631 item_ends_stack.push(item.range.end);
3632 }
3633
3634 Some(anchor_items)
3635 }
3636
3637 fn next_outline_item(
3638 &self,
3639 config: &OutlineConfig,
3640 mat: &SyntaxMapMatch,
3641 range: &Range<usize>,
3642 include_extra_context: bool,
3643 theme: Option<&SyntaxTheme>,
3644 ) -> Option<OutlineItem<Point>> {
3645 let item_node = mat.captures.iter().find_map(|cap| {
3646 if cap.index == config.item_capture_ix {
3647 Some(cap.node)
3648 } else {
3649 None
3650 }
3651 })?;
3652
3653 let item_byte_range = item_node.byte_range();
3654 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3655 return None;
3656 }
3657 let item_point_range = Point::from_ts_point(item_node.start_position())
3658 ..Point::from_ts_point(item_node.end_position());
3659
3660 let mut open_point = None;
3661 let mut close_point = None;
3662 let mut buffer_ranges = Vec::new();
3663 for capture in mat.captures {
3664 let node_is_name;
3665 if capture.index == config.name_capture_ix {
3666 node_is_name = true;
3667 } else if Some(capture.index) == config.context_capture_ix
3668 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3669 {
3670 node_is_name = false;
3671 } else {
3672 if Some(capture.index) == config.open_capture_ix {
3673 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3674 } else if Some(capture.index) == config.close_capture_ix {
3675 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3676 }
3677
3678 continue;
3679 }
3680
3681 let mut range = capture.node.start_byte()..capture.node.end_byte();
3682 let start = capture.node.start_position();
3683 if capture.node.end_position().row > start.row {
3684 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3685 }
3686
3687 if !range.is_empty() {
3688 buffer_ranges.push((range, node_is_name));
3689 }
3690 }
3691 if buffer_ranges.is_empty() {
3692 return None;
3693 }
3694 let mut text = String::new();
3695 let mut highlight_ranges = Vec::new();
3696 let mut name_ranges = Vec::new();
3697 let mut chunks = self.chunks(
3698 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3699 true,
3700 );
3701 let mut last_buffer_range_end = 0;
3702
3703 for (buffer_range, is_name) in buffer_ranges {
3704 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3705 if space_added {
3706 text.push(' ');
3707 }
3708 let before_append_len = text.len();
3709 let mut offset = buffer_range.start;
3710 chunks.seek(buffer_range.clone());
3711 for mut chunk in chunks.by_ref() {
3712 if chunk.text.len() > buffer_range.end - offset {
3713 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3714 offset = buffer_range.end;
3715 } else {
3716 offset += chunk.text.len();
3717 }
3718 let style = chunk
3719 .syntax_highlight_id
3720 .zip(theme)
3721 .and_then(|(highlight, theme)| highlight.style(theme));
3722 if let Some(style) = style {
3723 let start = text.len();
3724 let end = start + chunk.text.len();
3725 highlight_ranges.push((start..end, style));
3726 }
3727 text.push_str(chunk.text);
3728 if offset >= buffer_range.end {
3729 break;
3730 }
3731 }
3732 if is_name {
3733 let after_append_len = text.len();
3734 let start = if space_added && !name_ranges.is_empty() {
3735 before_append_len - 1
3736 } else {
3737 before_append_len
3738 };
3739 name_ranges.push(start..after_append_len);
3740 }
3741 last_buffer_range_end = buffer_range.end;
3742 }
3743
3744 Some(OutlineItem {
3745 depth: 0, // We'll calculate the depth later
3746 range: item_point_range,
3747 text,
3748 highlight_ranges,
3749 name_ranges,
3750 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3751 annotation_range: None,
3752 })
3753 }
3754
    /// Returns the byte ranges of function bodies overlapping the given range,
    /// suitable for folding.
    pub fn function_body_fold_ranges<T: ToOffset>(
3756 &self,
3757 within: Range<T>,
3758 ) -> impl Iterator<Item = Range<usize>> + '_ {
3759 self.text_object_ranges(within, TreeSitterOptions::default())
3760 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3761 }
3762
3763 /// For each grammar in the language, runs the provided
3764 /// [`tree_sitter::Query`] against the given range.
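    ///
    /// A usage sketch (illustrative; assumes a `snapshot: BufferSnapshot` and mirrors
    /// how the outline query is consumed elsewhere in this file):
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for this grammar's pattern, then move on.
    ///     matches.advance();
    /// }
    /// ```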
3765 pub fn matches(
3766 &self,
3767 range: Range<usize>,
3768 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3769 ) -> SyntaxMapMatches<'_> {
3770 self.syntax.matches(range, self, query)
3771 }
3772
    /// Returns all bracket pairs overlapping the given range, including
    /// newline-only pairs.
    pub fn all_bracket_ranges(
3774 &self,
3775 range: Range<usize>,
3776 ) -> impl Iterator<Item = BracketMatch> + '_ {
3777 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3778 grammar.brackets_config.as_ref().map(|c| &c.query)
3779 });
3780 let configs = matches
3781 .grammars()
3782 .iter()
3783 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3784 .collect::<Vec<_>>();
3785
3786 iter::from_fn(move || {
3787 while let Some(mat) = matches.peek() {
3788 let mut open = None;
3789 let mut close = None;
3790 let config = &configs[mat.grammar_index];
3791 let pattern = &config.patterns[mat.pattern_index];
3792 for capture in mat.captures {
3793 if capture.index == config.open_capture_ix {
3794 open = Some(capture.node.byte_range());
3795 } else if capture.index == config.close_capture_ix {
3796 close = Some(capture.node.byte_range());
3797 }
3798 }
3799
3800 matches.advance();
3801
3802 let Some((open_range, close_range)) = open.zip(close) else {
3803 continue;
3804 };
3805
3806 let bracket_range = open_range.start..=close_range.end;
3807 if !bracket_range.overlaps(&range) {
3808 continue;
3809 }
3810
3811 return Some(BracketMatch {
3812 open_range,
3813 close_range,
3814 newline_only: pattern.newline_only,
3815 });
3816 }
3817 None
3818 })
3819 }
3820
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3822 pub fn bracket_ranges<T: ToOffset>(
3823 &self,
3824 range: Range<T>,
3825 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Widen the range by one byte on each side so that bracket pairs
        // adjacent to it are also considered.
3827 let range = range.start.to_offset(self).saturating_sub(1)
3828 ..self.len().min(range.end.to_offset(self) + 1);
3829 self.all_bracket_ranges(range)
3830 .filter(|pair| !pair.newline_only)
3831 }
3832
3833 pub fn text_object_ranges<T: ToOffset>(
3834 &self,
3835 range: Range<T>,
3836 options: TreeSitterOptions,
3837 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3838 let range = range.start.to_offset(self).saturating_sub(1)
3839 ..self.len().min(range.end.to_offset(self) + 1);
3840
3841 let mut matches =
3842 self.syntax
3843 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3844 grammar.text_object_config.as_ref().map(|c| &c.query)
3845 });
3846
3847 let configs = matches
3848 .grammars()
3849 .iter()
3850 .map(|grammar| grammar.text_object_config.as_ref())
3851 .collect::<Vec<_>>();
3852
3853 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3854
3855 iter::from_fn(move || {
3856 loop {
3857 while let Some(capture) = captures.pop() {
3858 if capture.0.overlaps(&range) {
3859 return Some(capture);
3860 }
3861 }
3862
3863 let mat = matches.peek()?;
3864
3865 let Some(config) = configs[mat.grammar_index].as_ref() else {
3866 matches.advance();
3867 continue;
3868 };
3869
3870 for capture in mat.captures {
3871 let Some(ix) = config
3872 .text_objects_by_capture_ix
3873 .binary_search_by_key(&capture.index, |e| e.0)
3874 .ok()
3875 else {
3876 continue;
3877 };
3878 let text_object = config.text_objects_by_capture_ix[ix].1;
3879 let byte_range = capture.node.byte_range();
3880
3881 let mut found = false;
3882 for (range, existing) in captures.iter_mut() {
3883 if existing == &text_object {
3884 range.start = range.start.min(byte_range.start);
3885 range.end = range.end.max(byte_range.end);
3886 found = true;
3887 break;
3888 }
3889 }
3890
3891 if !found {
3892 captures.push((byte_range, text_object));
3893 }
3894 }
3895
3896 matches.advance();
3897 }
3898 })
3899 }
3900
    /// Returns the enclosing bracket ranges containing the given range.
3902 pub fn enclosing_bracket_ranges<T: ToOffset>(
3903 &self,
3904 range: Range<T>,
3905 ) -> impl Iterator<Item = BracketMatch> + '_ {
3906 let range = range.start.to_offset(self)..range.end.to_offset(self);
3907
3908 self.bracket_ranges(range.clone()).filter(move |pair| {
3909 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3910 })
3911 }
3912
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no bracket pair contains it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges
    /// are considered.
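    ///
    /// A usage sketch (illustrative; `snapshot` and `selection: Range<usize>` are
    /// assumed to exist, and the filter shown is hypothetical):
    /// ```ignore
    /// // Only consider bracket pairs whose open and close tokens sit on different rows.
    /// let filter = |open: Range<usize>, close: Range<usize>| {
    ///     snapshot.offset_to_point(open.start).row != snapshot.offset_to_point(close.end).row
    /// };
    /// if let Some((open, close)) = snapshot.innermost_enclosing_bracket_ranges(selection, Some(&filter)) {
    ///     // `open` and `close` are the byte ranges of the bracket tokens themselves.
    /// }
    /// ```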
3916 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3917 &self,
3918 range: Range<T>,
3919 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3920 ) -> Option<(Range<usize>, Range<usize>)> {
3921 let range = range.start.to_offset(self)..range.end.to_offset(self);
3922
3923 // Get the ranges of the innermost pair of brackets.
3924 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3925
3926 for pair in self.enclosing_bracket_ranges(range.clone()) {
3927 if let Some(range_filter) = range_filter {
3928 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3929 continue;
3930 }
3931 }
3932
3933 let len = pair.close_range.end - pair.open_range.start;
3934
3935 if let Some((existing_open, existing_close)) = &result {
3936 let existing_len = existing_close.end - existing_open.start;
3937 if len > existing_len {
3938 continue;
3939 }
3940 }
3941
3942 result = Some((pair.open_range, pair.close_range));
3943 }
3944
3945 result
3946 }
3947
3948 /// Returns anchor ranges for any matches of the redaction query.
3949 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3950 /// will be run on the relevant section of the buffer.
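    ///
    /// A usage sketch (illustrative; assumes a `snapshot: BufferSnapshot`):
    /// ```ignore
    /// let redactions: Vec<Range<usize>> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// // Each returned range is a byte range that should be rendered obscured.
    /// ```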
3951 pub fn redacted_ranges<T: ToOffset>(
3952 &self,
3953 range: Range<T>,
3954 ) -> impl Iterator<Item = Range<usize>> + '_ {
3955 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3956 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3957 grammar
3958 .redactions_config
3959 .as_ref()
3960 .map(|config| &config.query)
3961 });
3962
3963 let configs = syntax_matches
3964 .grammars()
3965 .iter()
3966 .map(|grammar| grammar.redactions_config.as_ref())
3967 .collect::<Vec<_>>();
3968
3969 iter::from_fn(move || {
3970 let redacted_range = syntax_matches
3971 .peek()
3972 .and_then(|mat| {
3973 configs[mat.grammar_index].and_then(|config| {
3974 mat.captures
3975 .iter()
3976 .find(|capture| capture.index == config.redaction_capture_ix)
3977 })
3978 })
3979 .map(|mat| mat.node.byte_range());
3980 syntax_matches.advance();
3981 redacted_range
3982 })
3983 }
3984
3985 pub fn injections_intersecting_range<T: ToOffset>(
3986 &self,
3987 range: Range<T>,
3988 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3989 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3990
3991 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3992 grammar
3993 .injection_config
3994 .as_ref()
3995 .map(|config| &config.query)
3996 });
3997
3998 let configs = syntax_matches
3999 .grammars()
4000 .iter()
4001 .map(|grammar| grammar.injection_config.as_ref())
4002 .collect::<Vec<_>>();
4003
4004 iter::from_fn(move || {
4005 let ranges = syntax_matches.peek().and_then(|mat| {
4006 let config = &configs[mat.grammar_index]?;
4007 let content_capture_range = mat.captures.iter().find_map(|capture| {
4008 if capture.index == config.content_capture_ix {
4009 Some(capture.node.byte_range())
4010 } else {
4011 None
4012 }
4013 })?;
4014 let language = self.language_at(content_capture_range.start)?;
4015 Some((content_capture_range, language))
4016 });
4017 syntax_matches.advance();
4018 ranges
4019 })
4020 }
4021
4022 pub fn runnable_ranges(
4023 &self,
4024 offset_range: Range<usize>,
4025 ) -> impl Iterator<Item = RunnableRange> + '_ {
4026 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4027 grammar.runnable_config.as_ref().map(|config| &config.query)
4028 });
4029
4030 let test_configs = syntax_matches
4031 .grammars()
4032 .iter()
4033 .map(|grammar| grammar.runnable_config.as_ref())
4034 .collect::<Vec<_>>();
4035
4036 iter::from_fn(move || {
4037 loop {
4038 let mat = syntax_matches.peek()?;
4039
4040 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4041 let mut run_range = None;
4042 let full_range = mat.captures.iter().fold(
4043 Range {
4044 start: usize::MAX,
4045 end: 0,
4046 },
4047 |mut acc, next| {
4048 let byte_range = next.node.byte_range();
4049 if acc.start > byte_range.start {
4050 acc.start = byte_range.start;
4051 }
4052 if acc.end < byte_range.end {
4053 acc.end = byte_range.end;
4054 }
4055 acc
4056 },
4057 );
4058 if full_range.start > full_range.end {
4059 // We did not find a full spanning range of this match.
4060 return None;
4061 }
4062 let extra_captures: SmallVec<[_; 1]> =
4063 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4064 test_configs
4065 .extra_captures
4066 .get(capture.index as usize)
4067 .cloned()
4068 .and_then(|tag_name| match tag_name {
4069 RunnableCapture::Named(name) => {
4070 Some((capture.node.byte_range(), name))
4071 }
4072 RunnableCapture::Run => {
4073 let _ = run_range.insert(capture.node.byte_range());
4074 None
4075 }
4076 })
4077 }));
4078 let run_range = run_range?;
4079 let tags = test_configs
4080 .query
4081 .property_settings(mat.pattern_index)
4082 .iter()
4083 .filter_map(|property| {
4084 if *property.key == *"tag" {
4085 property
4086 .value
4087 .as_ref()
4088 .map(|value| RunnableTag(value.to_string().into()))
4089 } else {
4090 None
4091 }
4092 })
4093 .collect();
4094 let extra_captures = extra_captures
4095 .into_iter()
4096 .map(|(range, name)| {
4097 (
4098 name.to_string(),
4099 self.text_for_range(range.clone()).collect::<String>(),
4100 )
4101 })
4102 .collect();
4103 // All tags should have the same range.
4104 Some(RunnableRange {
4105 run_range,
4106 full_range,
4107 runnable: Runnable {
4108 tags,
4109 language: mat.language,
4110 buffer: self.remote_id(),
4111 },
4112 extra_captures,
4113 buffer_id: self.remote_id(),
4114 })
4115 });
4116
4117 syntax_matches.advance();
4118 if test_range.is_some() {
                    // Only return early when this match actually produced a runnable range.
                    // If the match had no run marker, keep looping and inspect the next match
                    // rather than ending the iterator prematurely.
4121 return test_range;
4122 }
4123 }
4124 })
4125 }
4126
    /// Returns the selections of peers that intersect the given range. Selections
    /// of the local replica are only included when `include_local` is true.
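    ///
    /// A usage sketch (illustrative; `range: Range<Anchor>` is assumed to be built elsewhere):
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range.clone(), false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are `Anchor`s into this buffer.
    ///     }
    /// }
    /// ```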
4128 #[allow(clippy::type_complexity)]
4129 pub fn selections_in_range(
4130 &self,
4131 range: Range<Anchor>,
4132 include_local: bool,
4133 ) -> impl Iterator<
4134 Item = (
4135 ReplicaId,
4136 bool,
4137 CursorShape,
4138 impl Iterator<Item = &Selection<Anchor>> + '_,
4139 ),
4140 > + '_ {
4141 self.remote_selections
4142 .iter()
4143 .filter(move |(replica_id, set)| {
4144 (include_local || **replica_id != self.text.replica_id())
4145 && !set.selections.is_empty()
4146 })
4147 .map(move |(replica_id, set)| {
4148 let start_ix = match set.selections.binary_search_by(|probe| {
4149 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4150 }) {
4151 Ok(ix) | Err(ix) => ix,
4152 };
4153 let end_ix = match set.selections.binary_search_by(|probe| {
4154 probe.start.cmp(&range.end, self).then(Ordering::Less)
4155 }) {
4156 Ok(ix) | Err(ix) => ix,
4157 };
4158
4159 (
4160 *replica_id,
4161 set.line_mode,
4162 set.cursor_shape,
4163 set.selections[start_ix..end_ix].iter(),
4164 )
4165 })
4166 }
4167
    /// Returns whether the buffer contains any diagnostics.
4169 pub fn has_diagnostics(&self) -> bool {
4170 !self.diagnostics.is_empty()
4171 }
4172
4173 /// Returns all the diagnostics intersecting the given range.
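    ///
    /// A usage sketch (illustrative; assumes a `snapshot: BufferSnapshot` and resolves
    /// entries to `Point` ranges):
    /// ```ignore
    /// let errors: Vec<_> = snapshot
    ///     .diagnostics_in_range::<_, Point>(Point::new(0, 0)..snapshot.max_point(), false)
    ///     .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///     .collect();
    /// ```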
4174 pub fn diagnostics_in_range<'a, T, O>(
4175 &'a self,
4176 search_range: Range<T>,
4177 reversed: bool,
4178 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4179 where
4180 T: 'a + Clone + ToOffset,
4181 O: 'a + FromAnchor,
4182 {
4183 let mut iterators: Vec<_> = self
4184 .diagnostics
4185 .iter()
4186 .map(|(_, collection)| {
4187 collection
4188 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4189 .peekable()
4190 })
4191 .collect();
4192
4193 std::iter::from_fn(move || {
4194 let (next_ix, _) = iterators
4195 .iter_mut()
4196 .enumerate()
4197 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4198 .min_by(|(_, a), (_, b)| {
4199 let cmp = a
4200 .range
4201 .start
4202 .cmp(&b.range.start, self)
4203 // when range is equal, sort by diagnostic severity
4204 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4205 // and stabilize order with group_id
4206 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4207 if reversed { cmp.reverse() } else { cmp }
4208 })?;
4209 iterators[next_ix]
4210 .next()
4211 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4212 diagnostic,
4213 range: FromAnchor::from_anchor(&range.start, self)
4214 ..FromAnchor::from_anchor(&range.end, self),
4215 })
4216 })
4217 }
4218
4219 /// Returns all the diagnostic groups associated with the given
4220 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4222 pub fn diagnostic_groups(
4223 &self,
4224 language_server_id: Option<LanguageServerId>,
4225 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4226 let mut groups = Vec::new();
4227
4228 if let Some(language_server_id) = language_server_id {
4229 if let Ok(ix) = self
4230 .diagnostics
4231 .binary_search_by_key(&language_server_id, |e| e.0)
4232 {
4233 self.diagnostics[ix]
4234 .1
4235 .groups(language_server_id, &mut groups, self);
4236 }
4237 } else {
4238 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4239 diagnostics.groups(*language_server_id, &mut groups, self);
4240 }
4241 }
4242
4243 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4244 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4245 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4246 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4247 });
4248
4249 groups
4250 }
4251
4252 /// Returns an iterator over the diagnostics for the given group.
4253 pub fn diagnostic_group<O>(
4254 &self,
4255 group_id: usize,
4256 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4257 where
4258 O: FromAnchor + 'static,
4259 {
4260 self.diagnostics
4261 .iter()
4262 .flat_map(move |(_, set)| set.group(group_id, self))
4263 }
4264
4265 /// An integer version number that accounts for all updates besides
4266 /// the buffer's text itself (which is versioned via a version vector).
4267 pub fn non_text_state_update_count(&self) -> usize {
4268 self.non_text_state_update_count
4269 }
4270
4271 /// An integer version that changes when the buffer's syntax changes.
4272 pub fn syntax_update_count(&self) -> usize {
4273 self.syntax.update_count()
4274 }
4275
    /// Returns a snapshot of the underlying file.
4277 pub fn file(&self) -> Option<&Arc<dyn File>> {
4278 self.file.as_ref()
4279 }
4280
4281 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4282 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4283 if let Some(file) = self.file() {
4284 if file.path().file_name().is_none() || include_root {
4285 Some(file.full_path(cx))
4286 } else {
4287 Some(file.path().to_path_buf())
4288 }
4289 } else {
4290 None
4291 }
4292 }
4293
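    /// Collects the distinct words in the given buffer offset range, keyed by their text.
    ///
    /// When `fuzzy_contents` is set, only words containing every character of that
    /// string, in order and case-insensitively, are returned.
    ///
    /// A usage sketch (illustrative; assumes a `snapshot: BufferSnapshot`):
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, range) in &words {
    ///     // `range` is an `Anchor` range covering the word in the buffer.
    /// }
    /// ```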
4294 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4295 let query_str = query.fuzzy_contents;
4296 if query_str.map_or(false, |query| query.is_empty()) {
4297 return BTreeMap::default();
4298 }
4299
4300 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4301 language,
4302 override_id: None,
4303 }));
4304
4305 let mut query_ix = 0;
4306 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4307 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4308
4309 let mut words = BTreeMap::default();
4310 let mut current_word_start_ix = None;
4311 let mut chunk_ix = query.range.start;
4312 for chunk in self.chunks(query.range, false) {
4313 for (i, c) in chunk.text.char_indices() {
4314 let ix = chunk_ix + i;
4315 if classifier.is_word(c) {
4316 if current_word_start_ix.is_none() {
4317 current_word_start_ix = Some(ix);
4318 }
4319
4320 if let Some(query_chars) = &query_chars {
4321 if query_ix < query_len {
4322 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4323 query_ix += 1;
4324 }
4325 }
4326 }
4327 continue;
4328 } else if let Some(word_start) = current_word_start_ix.take() {
4329 if query_ix == query_len {
4330 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4331 let mut word_text = self.text_for_range(word_start..ix).peekable();
4332 let first_char = word_text
4333 .peek()
4334 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
4336 if !query.skip_digits
4337 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4338 {
4339 words.insert(word_text.collect(), word_range);
4340 }
4341 }
4342 }
4343 query_ix = 0;
4344 }
4345 chunk_ix += chunk.text.len();
4346 }
4347
4348 words
4349 }
4350}
4351
4352pub struct WordsQuery<'a> {
    /// Only returns words containing every character of this string, in order (case-insensitive).
4354 pub fuzzy_contents: Option<&'a str>,
4355 /// Skips words that start with a digit.
4356 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4358 pub range: Range<usize>,
4359}
4360
4361fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4362 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4363}
4364
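/// Measures the leading whitespace of `text`. The indent kind (spaces vs. tabs) is
/// taken from the first indent character; any further spaces or tabs only increase
/// the length.
///
/// A sketch of the expected results (illustrative; the function is private):
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!((indent.len, indent.kind), (4, IndentKind::Space));
/// // Mixed indentation keeps the first character's kind but counts every indent char.
/// let mixed = indent_size_for_text("  \tlet y = 2;".chars());
/// assert_eq!((mixed.len, mixed.kind), (3, IndentKind::Space));
/// ```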
4365fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4366 let mut result = IndentSize::spaces(0);
4367 for c in text {
4368 let kind = match c {
4369 ' ' => IndentKind::Space,
4370 '\t' => IndentKind::Tab,
4371 _ => break,
4372 };
4373 if result.len == 0 {
4374 result.kind = kind;
4375 }
4376 result.len += 1;
4377 }
4378 result
4379}
4380
4381impl Clone for BufferSnapshot {
4382 fn clone(&self) -> Self {
4383 Self {
4384 text: self.text.clone(),
4385 syntax: self.syntax.clone(),
4386 file: self.file.clone(),
4387 remote_selections: self.remote_selections.clone(),
4388 diagnostics: self.diagnostics.clone(),
4389 language: self.language.clone(),
4390 non_text_state_update_count: self.non_text_state_update_count,
4391 }
4392 }
4393}
4394
4395impl Deref for BufferSnapshot {
4396 type Target = text::BufferSnapshot;
4397
4398 fn deref(&self) -> &Self::Target {
4399 &self.text
4400 }
4401}
4402
4403unsafe impl Send for BufferChunks<'_> {}
4404
4405impl<'a> BufferChunks<'a> {
4406 pub(crate) fn new(
4407 text: &'a Rope,
4408 range: Range<usize>,
4409 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4410 diagnostics: bool,
4411 buffer_snapshot: Option<&'a BufferSnapshot>,
4412 ) -> Self {
4413 let mut highlights = None;
4414 if let Some((captures, highlight_maps)) = syntax {
4415 highlights = Some(BufferChunkHighlights {
4416 captures,
4417 next_capture: None,
4418 stack: Default::default(),
4419 highlight_maps,
4420 })
4421 }
4422
4423 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4424 let chunks = text.chunks_in_range(range.clone());
4425
4426 let mut this = BufferChunks {
4427 range,
4428 buffer_snapshot,
4429 chunks,
4430 diagnostic_endpoints,
4431 error_depth: 0,
4432 warning_depth: 0,
4433 information_depth: 0,
4434 hint_depth: 0,
4435 unnecessary_depth: 0,
4436 underline: true,
4437 highlights,
4438 };
4439 this.initialize_diagnostic_endpoints();
4440 this
4441 }
4442
    /// Seeks to the given byte range in the buffer.
4444 pub fn seek(&mut self, range: Range<usize>) {
4445 let old_range = std::mem::replace(&mut self.range, range.clone());
4446 self.chunks.set_range(self.range.clone());
4447 if let Some(highlights) = self.highlights.as_mut() {
4448 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4449 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4450 highlights
4451 .stack
4452 .retain(|(end_offset, _)| *end_offset > range.start);
4453 if let Some(capture) = &highlights.next_capture {
4454 if range.start >= capture.node.start_byte() {
4455 let next_capture_end = capture.node.end_byte();
4456 if range.start < next_capture_end {
4457 highlights.stack.push((
4458 next_capture_end,
4459 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4460 ));
4461 }
4462 highlights.next_capture.take();
4463 }
4464 }
4465 } else if let Some(snapshot) = self.buffer_snapshot {
4466 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4467 *highlights = BufferChunkHighlights {
4468 captures,
4469 next_capture: None,
4470 stack: Default::default(),
4471 highlight_maps,
4472 };
4473 } else {
4474 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4475 // Seeking such BufferChunks is not supported.
4476 debug_assert!(
4477 false,
4478 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4479 );
4480 }
4481
4482 highlights.captures.set_byte_range(self.range.clone());
4483 self.initialize_diagnostic_endpoints();
4484 }
4485 }
4486
4487 fn initialize_diagnostic_endpoints(&mut self) {
4488 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4489 if let Some(buffer) = self.buffer_snapshot {
4490 let mut diagnostic_endpoints = Vec::new();
4491 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4492 diagnostic_endpoints.push(DiagnosticEndpoint {
4493 offset: entry.range.start,
4494 is_start: true,
4495 severity: entry.diagnostic.severity,
4496 is_unnecessary: entry.diagnostic.is_unnecessary,
4497 underline: entry.diagnostic.underline,
4498 });
4499 diagnostic_endpoints.push(DiagnosticEndpoint {
4500 offset: entry.range.end,
4501 is_start: false,
4502 severity: entry.diagnostic.severity,
4503 is_unnecessary: entry.diagnostic.is_unnecessary,
4504 underline: entry.diagnostic.underline,
4505 });
4506 }
4507 diagnostic_endpoints
4508 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4509 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4510 self.hint_depth = 0;
4511 self.error_depth = 0;
4512 self.warning_depth = 0;
4513 self.information_depth = 0;
4514 }
4515 }
4516 }
4517
4518 /// The current byte offset in the buffer.
4519 pub fn offset(&self) -> usize {
4520 self.range.start
4521 }
4522
4523 pub fn range(&self) -> Range<usize> {
4524 self.range.clone()
4525 }
4526
4527 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4528 let depth = match endpoint.severity {
4529 DiagnosticSeverity::ERROR => &mut self.error_depth,
4530 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4531 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4532 DiagnosticSeverity::HINT => &mut self.hint_depth,
4533 _ => return,
4534 };
4535 if endpoint.is_start {
4536 *depth += 1;
4537 } else {
4538 *depth -= 1;
4539 }
4540
4541 if endpoint.is_unnecessary {
4542 if endpoint.is_start {
4543 self.unnecessary_depth += 1;
4544 } else {
4545 self.unnecessary_depth -= 1;
4546 }
4547 }
4548 }
4549
4550 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4551 if self.error_depth > 0 {
4552 Some(DiagnosticSeverity::ERROR)
4553 } else if self.warning_depth > 0 {
4554 Some(DiagnosticSeverity::WARNING)
4555 } else if self.information_depth > 0 {
4556 Some(DiagnosticSeverity::INFORMATION)
4557 } else if self.hint_depth > 0 {
4558 Some(DiagnosticSeverity::HINT)
4559 } else {
4560 None
4561 }
4562 }
4563
4564 fn current_code_is_unnecessary(&self) -> bool {
4565 self.unnecessary_depth > 0
4566 }
4567}
4568
4569impl<'a> Iterator for BufferChunks<'a> {
4570 type Item = Chunk<'a>;
4571
4572 fn next(&mut self) -> Option<Self::Item> {
4573 let mut next_capture_start = usize::MAX;
4574 let mut next_diagnostic_endpoint = usize::MAX;
4575
4576 if let Some(highlights) = self.highlights.as_mut() {
4577 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4578 if *parent_capture_end <= self.range.start {
4579 highlights.stack.pop();
4580 } else {
4581 break;
4582 }
4583 }
4584
4585 if highlights.next_capture.is_none() {
4586 highlights.next_capture = highlights.captures.next();
4587 }
4588
4589 while let Some(capture) = highlights.next_capture.as_ref() {
4590 if self.range.start < capture.node.start_byte() {
4591 next_capture_start = capture.node.start_byte();
4592 break;
4593 } else {
4594 let highlight_id =
4595 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4596 highlights
4597 .stack
4598 .push((capture.node.end_byte(), highlight_id));
4599 highlights.next_capture = highlights.captures.next();
4600 }
4601 }
4602 }
4603
4604 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4605 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4606 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4607 if endpoint.offset <= self.range.start {
4608 self.update_diagnostic_depths(endpoint);
4609 diagnostic_endpoints.next();
4610 self.underline = endpoint.underline;
4611 } else {
4612 next_diagnostic_endpoint = endpoint.offset;
4613 break;
4614 }
4615 }
4616 }
4617 self.diagnostic_endpoints = diagnostic_endpoints;
4618
4619 if let Some(chunk) = self.chunks.peek() {
4620 let chunk_start = self.range.start;
4621 let mut chunk_end = (self.chunks.offset() + chunk.len())
4622 .min(next_capture_start)
4623 .min(next_diagnostic_endpoint);
4624 let mut highlight_id = None;
4625 if let Some(highlights) = self.highlights.as_ref() {
4626 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4627 chunk_end = chunk_end.min(*parent_capture_end);
4628 highlight_id = Some(*parent_highlight_id);
4629 }
4630 }
4631
4632 let slice =
4633 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4634 self.range.start = chunk_end;
4635 if self.range.start == self.chunks.offset() + chunk.len() {
4636 self.chunks.next().unwrap();
4637 }
4638
4639 Some(Chunk {
4640 text: slice,
4641 syntax_highlight_id: highlight_id,
4642 underline: self.underline,
4643 diagnostic_severity: self.current_diagnostic_severity(),
4644 is_unnecessary: self.current_code_is_unnecessary(),
4645 ..Chunk::default()
4646 })
4647 } else {
4648 None
4649 }
4650 }
4651}
4652
4653impl operation_queue::Operation for Operation {
4654 fn lamport_timestamp(&self) -> clock::Lamport {
4655 match self {
4656 Operation::Buffer(_) => {
4657 unreachable!("buffer operations should never be deferred at this layer")
4658 }
4659 Operation::UpdateDiagnostics {
4660 lamport_timestamp, ..
4661 }
4662 | Operation::UpdateSelections {
4663 lamport_timestamp, ..
4664 }
4665 | Operation::UpdateCompletionTriggers {
4666 lamport_timestamp, ..
4667 } => *lamport_timestamp,
4668 }
4669 }
4670}
4671
4672impl Default for Diagnostic {
4673 fn default() -> Self {
4674 Self {
4675 source: Default::default(),
4676 source_kind: DiagnosticSourceKind::Other,
4677 code: None,
4678 code_description: None,
4679 severity: DiagnosticSeverity::ERROR,
4680 message: Default::default(),
4681 markdown: None,
4682 group_id: 0,
4683 is_primary: false,
4684 is_disk_based: false,
4685 is_unnecessary: false,
4686 underline: true,
4687 data: None,
4688 }
4689 }
4690}
4691
4692impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4694 pub fn spaces(len: u32) -> Self {
4695 Self {
4696 len,
4697 kind: IndentKind::Space,
4698 }
4699 }
4700
4701 /// Returns an [`IndentSize`] representing a tab.
4702 pub fn tab() -> Self {
4703 Self {
4704 len: 1,
4705 kind: IndentKind::Tab,
4706 }
4707 }
4708
4709 /// An iterator over the characters represented by this [`IndentSize`].
4710 pub fn chars(&self) -> impl Iterator<Item = char> {
4711 iter::repeat(self.char()).take(self.len as usize)
4712 }
4713
4714 /// The character representation of this [`IndentSize`].
4715 pub fn char(&self) -> char {
4716 match self.kind {
4717 IndentKind::Space => ' ',
4718 IndentKind::Tab => '\t',
4719 }
4720 }
4721
4722 /// Consumes the current [`IndentSize`] and returns a new one that has
4723 /// been shrunk or enlarged by the given size along the given direction.
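    ///
    /// A sketch of the growth and shrink rules (illustrative, not a doctest):
    /// ```ignore
    /// // Growing by a same-kind indent adds the lengths.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4)).len, 8);
    /// // Shrinking by a same-kind indent subtracts them.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2)).len, 2);
    /// // A zero-length indent adopts the delta entirely when growing.
    /// assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()).kind, IndentKind::Tab);
    /// ```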
4724 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4725 match direction {
4726 Ordering::Less => {
4727 if self.kind == size.kind && self.len >= size.len {
4728 self.len -= size.len;
4729 }
4730 }
4731 Ordering::Equal => {}
4732 Ordering::Greater => {
4733 if self.len == 0 {
4734 self = size;
4735 } else if self.kind == size.kind {
4736 self.len += size.len;
4737 }
4738 }
4739 }
4740 self
4741 }
4742
4743 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4744 match self.kind {
4745 IndentKind::Space => self.len as usize,
4746 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4747 }
4748 }
4749}
4750
4751#[cfg(any(test, feature = "test-support"))]
4752pub struct TestFile {
4753 pub path: Arc<Path>,
4754 pub root_name: String,
4755 pub local_root: Option<PathBuf>,
4756}
4757
4758#[cfg(any(test, feature = "test-support"))]
4759impl File for TestFile {
4760 fn path(&self) -> &Arc<Path> {
4761 &self.path
4762 }
4763
4764 fn full_path(&self, _: &gpui::App) -> PathBuf {
4765 PathBuf::from(&self.root_name).join(self.path.as_ref())
4766 }
4767
4768 fn as_local(&self) -> Option<&dyn LocalFile> {
4769 if self.local_root.is_some() {
4770 Some(self)
4771 } else {
4772 None
4773 }
4774 }
4775
4776 fn disk_state(&self) -> DiskState {
4777 unimplemented!()
4778 }
4779
4780 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4781 self.path().file_name().unwrap_or(self.root_name.as_ref())
4782 }
4783
4784 fn worktree_id(&self, _: &App) -> WorktreeId {
4785 WorktreeId::from_usize(0)
4786 }
4787
4788 fn to_proto(&self, _: &App) -> rpc::proto::File {
4789 unimplemented!()
4790 }
4791
4792 fn is_private(&self) -> bool {
4793 false
4794 }
4795}
4796
4797#[cfg(any(test, feature = "test-support"))]
4798impl LocalFile for TestFile {
4799 fn abs_path(&self, _cx: &App) -> PathBuf {
4800 PathBuf::from(self.local_root.as_ref().unwrap())
4801 .join(&self.root_name)
4802 .join(self.path.as_ref())
4803 }
4804
4805 fn load(&self, _cx: &App) -> Task<Result<String>> {
4806 unimplemented!()
4807 }
4808
4809 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4810 unimplemented!()
4811 }
4812}
4813
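/// Groups an iterator of row numbers into contiguous ranges, starting a new range
/// whenever the sequence is non-consecutive or the current range reaches `max_len`.
///
/// A sketch of the expected grouping (illustrative; the function is crate-private):
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7]);
/// ```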
4814pub(crate) fn contiguous_ranges(
4815 values: impl Iterator<Item = u32>,
4816 max_len: usize,
4817) -> impl Iterator<Item = Range<u32>> {
4818 let mut values = values;
4819 let mut current_range: Option<Range<u32>> = None;
4820 std::iter::from_fn(move || {
4821 loop {
4822 if let Some(value) = values.next() {
4823 if let Some(range) = &mut current_range {
4824 if value == range.end && range.len() < max_len {
4825 range.end += 1;
4826 continue;
4827 }
4828 }
4829
4830 let prev_range = current_range.clone();
4831 current_range = Some(value..(value + 1));
4832 if prev_range.is_some() {
4833 return prev_range;
4834 }
4835 } else {
4836 return current_range.take();
4837 }
4838 }
4839 })
4840}
4841
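/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// taking language-specific word characters from a [`LanguageScope`] into account.
///
/// A minimal sketch of the builder-style API (illustrative, not a doctest):
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.is_word('x'));
/// assert!(classifier.is_word('-')); // punctuation is treated as word characters here
/// assert!(classifier.is_whitespace(' '));
/// ```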
4842#[derive(Default, Debug)]
4843pub struct CharClassifier {
4844 scope: Option<LanguageScope>,
4845 for_completion: bool,
4846 ignore_punctuation: bool,
4847}
4848
4849impl CharClassifier {
4850 pub fn new(scope: Option<LanguageScope>) -> Self {
4851 Self {
4852 scope,
4853 for_completion: false,
4854 ignore_punctuation: false,
4855 }
4856 }
4857
4858 pub fn for_completion(self, for_completion: bool) -> Self {
4859 Self {
4860 for_completion,
4861 ..self
4862 }
4863 }
4864
4865 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4866 Self {
4867 ignore_punctuation,
4868 ..self
4869 }
4870 }
4871
4872 pub fn is_whitespace(&self, c: char) -> bool {
4873 self.kind(c) == CharKind::Whitespace
4874 }
4875
4876 pub fn is_word(&self, c: char) -> bool {
4877 self.kind(c) == CharKind::Word
4878 }
4879
4880 pub fn is_punctuation(&self, c: char) -> bool {
4881 self.kind(c) == CharKind::Punctuation
4882 }
4883
4884 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4885 if c.is_alphanumeric() || c == '_' {
4886 return CharKind::Word;
4887 }
4888
4889 if let Some(scope) = &self.scope {
4890 let characters = if self.for_completion {
4891 scope.completion_query_characters()
4892 } else {
4893 scope.word_characters()
4894 };
4895 if let Some(characters) = characters {
4896 if characters.contains(&c) {
4897 return CharKind::Word;
4898 }
4899 }
4900 }
4901
4902 if c.is_whitespace() {
4903 return CharKind::Whitespace;
4904 }
4905
4906 if ignore_punctuation {
4907 CharKind::Word
4908 } else {
4909 CharKind::Punctuation
4910 }
4911 }
4912
4913 pub fn kind(&self, c: char) -> CharKind {
4914 self.kind_with(c, self.ignore_punctuation)
4915 }
4916}
4917
/// Finds all of the ranges of whitespace that occur at the ends of lines
4919/// in the given rope.
4920///
4921/// This could also be done with a regex search, but this implementation
4922/// avoids copying text.
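///
/// A sketch of the expected output (illustrative; assumes `Rope` can be built from a `&str`):
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nfoo\t\nbar\n");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 16..17]);
/// ```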
4923pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4924 let mut ranges = Vec::new();
4925
4926 let mut offset = 0;
4927 let mut prev_chunk_trailing_whitespace_range = 0..0;
4928 for chunk in rope.chunks() {
4929 let mut prev_line_trailing_whitespace_range = 0..0;
4930 for (i, line) in chunk.split('\n').enumerate() {
4931 let line_end_offset = offset + line.len();
4932 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4933 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4934
4935 if i == 0 && trimmed_line_len == 0 {
4936 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4937 }
4938 if !prev_line_trailing_whitespace_range.is_empty() {
4939 ranges.push(prev_line_trailing_whitespace_range);
4940 }
4941
4942 offset = line_end_offset + 1;
4943 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4944 }
4945
4946 offset -= 1;
4947 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4948 }
4949
4950 if !prev_chunk_trailing_whitespace_range.is_empty() {
4951 ranges.push(prev_chunk_trailing_whitespace_range);
4952 }
4953
4954 ranges
4955}