1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] has permission to edit.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` at the time of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Quick separation of diagnostic groups by their source.
233 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to the language server when we request code actions for this diagnostic.
235 pub data: Option<Value>,
236 /// Whether to underline the corresponding text range in the editor.
237 pub underline: bool,
238}
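// Illustrative example of diagnostic grouping (not from the original source): a
// language server might report a "mismatched types" error along with a related
// "expected due to this" note. Both entries would share one `group_id`, with the
// error carrying `is_primary: true` and the note carrying `is_primary: false`.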
239
240#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
241pub enum DiagnosticSourceKind {
242 Pulled,
243 Pushed,
244 Other,
245}
246
247/// An operation used to synchronize this buffer with its other replicas.
248#[derive(Clone, Debug, PartialEq)]
249pub enum Operation {
250 /// A text operation.
251 Buffer(text::Operation),
252
253 /// An update to the buffer's diagnostics.
254 UpdateDiagnostics {
255 /// The id of the language server that produced the new diagnostics.
256 server_id: LanguageServerId,
257 /// The diagnostics.
258 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
259 /// The buffer's lamport timestamp.
260 lamport_timestamp: clock::Lamport,
261 },
262
263 /// An update to the most recent selections in this buffer.
264 UpdateSelections {
265 /// The selections.
266 selections: Arc<[Selection<Anchor>]>,
267 /// The buffer's lamport timestamp.
268 lamport_timestamp: clock::Lamport,
269 /// Whether the selections are in 'line mode'.
270 line_mode: bool,
271 /// The [`CursorShape`] associated with these selections.
272 cursor_shape: CursorShape,
273 },
274
275 /// An update to the characters that should trigger autocompletion
276 /// for this buffer.
277 UpdateCompletionTriggers {
278 /// The characters that trigger autocompletion.
279 triggers: Vec<String>,
280 /// The buffer's lamport timestamp.
281 lamport_timestamp: clock::Lamport,
282 /// The language server ID.
283 server_id: LanguageServerId,
284 },
285}
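// Every variant carries the buffer's lamport timestamp from the moment it was
// generated; `serialize_ops` below sorts serialized operations by that timestamp
// before they are sent to other replicas.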
286
287/// An event that occurs in a buffer.
288#[derive(Clone, Debug, PartialEq)]
289pub enum BufferEvent {
290 /// The buffer was changed in a way that must be
291 /// propagated to its other replicas.
292 Operation {
293 operation: Operation,
294 is_local: bool,
295 },
296 /// The buffer was edited.
297 Edited,
298 /// The buffer's `dirty` bit changed.
299 DirtyChanged,
300 /// The buffer was saved.
301 Saved,
302 /// The buffer's file was changed on disk.
303 FileHandleChanged,
304 /// The buffer was reloaded.
305 Reloaded,
    /// The buffer is in need of a reload.
307 ReloadNeeded,
308 /// The buffer's language was changed.
309 LanguageChanged,
310 /// The buffer's syntax trees were updated.
311 Reparsed,
312 /// The buffer's diagnostics were updated.
313 DiagnosticsUpdated,
314 /// The buffer gained or lost editing capabilities.
315 CapabilityChanged,
316 /// The buffer was explicitly requested to close.
317 Closed,
318 /// The buffer was discarded when closing.
319 Discarded,
320}
321
322/// The file associated with a buffer.
323pub trait File: Send + Sync + Any {
324 /// Returns the [`LocalFile`] associated with this file, if the
325 /// file is local.
326 fn as_local(&self) -> Option<&dyn LocalFile>;
327
328 /// Returns whether this file is local.
329 fn is_local(&self) -> bool {
330 self.as_local().is_some()
331 }
332
333 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
334 /// only available in some states, such as modification time.
335 fn disk_state(&self) -> DiskState;
336
337 /// Returns the path of this file relative to the worktree's root directory.
338 fn path(&self) -> &Arc<Path>;
339
340 /// Returns the path of this file relative to the worktree's parent directory (this means it
341 /// includes the name of the worktree's root folder).
342 fn full_path(&self, cx: &App) -> PathBuf;
343
344 /// Returns the last component of this handle's absolute path. If this handle refers to the root
345 /// of its worktree, then this method will return the name of the worktree itself.
346 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
347
348 /// Returns the id of the worktree to which this file belongs.
349 ///
350 /// This is needed for looking up project-specific settings.
351 fn worktree_id(&self, cx: &App) -> WorktreeId;
352
353 /// Converts this file into a protobuf message.
354 fn to_proto(&self, cx: &App) -> rpc::proto::File;
355
356 /// Return whether Zed considers this to be a private file.
357 fn is_private(&self) -> bool;
358}
359
360/// The file's storage status - whether it's stored (`Present`), and if so when it was last
361/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
362/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
363/// indicator for new files.
364#[derive(Copy, Clone, Debug, PartialEq)]
365pub enum DiskState {
366 /// File created in Zed that has not been saved.
367 New,
368 /// File present on the filesystem.
369 Present { mtime: MTime },
370 /// Deleted file that was previously present.
371 Deleted,
372}
373
374impl DiskState {
375 /// Returns the file's last known modification time on disk.
376 pub fn mtime(self) -> Option<MTime> {
377 match self {
378 DiskState::New => None,
379 DiskState::Present { mtime } => Some(mtime),
380 DiskState::Deleted => None,
381 }
382 }
383
384 pub fn exists(&self) -> bool {
385 match self {
386 DiskState::New => false,
387 DiskState::Present { .. } => true,
388 DiskState::Deleted => false,
389 }
390 }
391}
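// For example, `DiskState::New.mtime()` and `DiskState::Deleted.mtime()` are both
// `None`, and `exists()` returns `true` only for `DiskState::Present { .. }`.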
392
393/// The file associated with a buffer, in the case where the file is on the local disk.
394pub trait LocalFile: File {
    /// Returns the absolute path of this file.
396 fn abs_path(&self, cx: &App) -> PathBuf;
397
398 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
399 fn load(&self, cx: &App) -> Task<Result<String>>;
400
401 /// Loads the file's contents from disk.
402 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
403}
404
405/// The auto-indent behavior associated with an editing operation.
406/// For some editing operations, each affected line of text has its
407/// indentation recomputed. For other operations, the entire block
408/// of edited text is adjusted uniformly.
409#[derive(Clone, Debug)]
410pub enum AutoindentMode {
411 /// Indent each line of inserted text.
412 EachLine,
413 /// Apply the same indentation adjustment to all of the lines
414 /// in a given insertion.
415 Block {
416 /// The original indentation column of the first line of each
417 /// insertion, if it has been copied.
418 ///
419 /// Knowing this makes it possible to preserve the relative indentation
420 /// of every line in the insertion from when it was copied.
421 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of the
        /// insertion has its indentation adjusted by `b - a` (see the worked example
        /// after this enum).
425 original_indent_columns: Vec<Option<u32>>,
426 },
427}
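// Worked example (illustrative, not taken from the original source): suppose a block
// of text is copied whose first line was indented to column 4 (`a = 4`), and on
// insertion that first line is auto-indented to column 8 (`b = 8`). With
// `AutoindentMode::Block` and `original_indent_columns: vec![Some(4)]`, every
// following line of the insertion is shifted by `b - a = 4` columns, preserving its
// indentation relative to the first line.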
428
429#[derive(Clone)]
430struct AutoindentRequest {
431 before_edit: BufferSnapshot,
432 entries: Vec<AutoindentRequestEntry>,
433 is_block_mode: bool,
434 ignore_empty_lines: bool,
435}
436
437#[derive(Debug, Clone)]
438struct AutoindentRequestEntry {
439 /// A range of the buffer whose indentation should be adjusted.
440 range: Range<Anchor>,
441 /// Whether or not these lines should be considered brand new, for the
442 /// purpose of auto-indent. When text is not new, its indentation will
443 /// only be adjusted if the suggested indentation level has *changed*
444 /// since the edit was made.
445 first_line_is_new: bool,
446 indent_size: IndentSize,
447 original_indent_column: Option<u32>,
448}
449
450#[derive(Debug)]
451struct IndentSuggestion {
452 basis_row: u32,
453 delta: Ordering,
454 within_error: bool,
455}
456
457struct BufferChunkHighlights<'a> {
458 captures: SyntaxMapCaptures<'a>,
459 next_capture: Option<SyntaxMapCapture<'a>>,
460 stack: Vec<(usize, HighlightId)>,
461 highlight_maps: Vec<HighlightMap>,
462}
463
464/// An iterator that yields chunks of a buffer's text, along with their
465/// syntax highlights and diagnostic status.
466pub struct BufferChunks<'a> {
467 buffer_snapshot: Option<&'a BufferSnapshot>,
468 range: Range<usize>,
469 chunks: text::Chunks<'a>,
470 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
471 error_depth: usize,
472 warning_depth: usize,
473 information_depth: usize,
474 hint_depth: usize,
475 unnecessary_depth: usize,
476 underline: bool,
477 highlights: Option<BufferChunkHighlights<'a>>,
478}
479
480/// A chunk of a buffer's text, along with its syntax highlight and
481/// diagnostic status.
482#[derive(Clone, Debug, Default)]
483pub struct Chunk<'a> {
484 /// The text of the chunk.
485 pub text: &'a str,
486 /// The syntax highlighting style of the chunk.
487 pub syntax_highlight_id: Option<HighlightId>,
488 /// The highlight style that has been applied to this chunk in
489 /// the editor.
490 pub highlight_style: Option<HighlightStyle>,
491 /// The severity of diagnostic associated with this chunk, if any.
492 pub diagnostic_severity: Option<DiagnosticSeverity>,
493 /// Whether this chunk of text is marked as unnecessary.
494 pub is_unnecessary: bool,
495 /// Whether this chunk of text was originally a tab character.
496 pub is_tab: bool,
    /// Whether this chunk of text comes from an inlay rather than the underlying buffer text.
    pub is_inlay: bool,
499 /// Whether to underline the corresponding text range in the editor.
500 pub underline: bool,
501}
502
503/// A set of edits to a given version of a buffer, computed asynchronously.
504#[derive(Debug)]
505pub struct Diff {
506 pub base_version: clock::Global,
507 pub line_ending: LineEnding,
508 pub edits: Vec<(Range<usize>, Arc<str>)>,
509}
510
511#[derive(Debug, Clone, Copy)]
512pub(crate) struct DiagnosticEndpoint {
513 offset: usize,
514 is_start: bool,
515 underline: bool,
516 severity: DiagnosticSeverity,
517 is_unnecessary: bool,
518}
519
520/// A class of characters, used for characterizing a run of text.
521#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
522pub enum CharKind {
523 /// Whitespace.
524 Whitespace,
525 /// Punctuation.
526 Punctuation,
527 /// Word.
528 Word,
529}
530
/// A runnable is a set of data about a region that could be resolved into a task.
532pub struct Runnable {
533 pub tags: SmallVec<[RunnableTag; 1]>,
534 pub language: Arc<Language>,
535 pub buffer: BufferId,
536}
537
538#[derive(Default, Clone, Debug)]
539pub struct HighlightedText {
540 pub text: SharedString,
541 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
542}
543
544#[derive(Default, Debug)]
545struct HighlightedTextBuilder {
546 pub text: String,
547 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
548}
549
550impl HighlightedText {
551 pub fn from_buffer_range<T: ToOffset>(
552 range: Range<T>,
553 snapshot: &text::BufferSnapshot,
554 syntax_snapshot: &SyntaxSnapshot,
555 override_style: Option<HighlightStyle>,
556 syntax_theme: &SyntaxTheme,
557 ) -> Self {
558 let mut highlighted_text = HighlightedTextBuilder::default();
559 highlighted_text.add_text_from_buffer_range(
560 range,
561 snapshot,
562 syntax_snapshot,
563 override_style,
564 syntax_theme,
565 );
566 highlighted_text.build()
567 }
568
569 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
570 gpui::StyledText::new(self.text.clone())
571 .with_default_highlights(default_style, self.highlights.iter().cloned())
572 }
573
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// begins within that whitespace, and a boolean indicating whether more lines follow.
576 pub fn first_line_preview(self) -> (Self, bool) {
577 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
578 let first_line = &self.text[..newline_ix];
579
580 // Trim leading whitespace, unless an edit starts prior to it.
581 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
582 if let Some((first_highlight_range, _)) = self.highlights.first() {
583 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
584 }
585
586 let preview_text = &first_line[preview_start_ix..];
587 let preview_highlights = self
588 .highlights
589 .into_iter()
590 .take_while(|(range, _)| range.start < newline_ix)
591 .filter_map(|(mut range, highlight)| {
592 range.start = range.start.saturating_sub(preview_start_ix);
593 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
594 if range.is_empty() {
595 None
596 } else {
597 Some((range, highlight))
598 }
599 });
600
601 let preview = Self {
602 text: SharedString::new(preview_text),
603 highlights: preview_highlights.collect(),
604 };
605
606 (preview, self.text.len() > newline_ix)
607 }
608}
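// A small worked example for `first_line_preview` (illustrative): given the text
// "    let x = 1;\nmore" with no highlights, the preview text is "let x = 1;" with
// the leading whitespace trimmed, and the returned boolean is `true` because another
// line follows the first newline.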
609
610impl HighlightedTextBuilder {
611 pub fn build(self) -> HighlightedText {
612 HighlightedText {
613 text: self.text.into(),
614 highlights: self.highlights,
615 }
616 }
617
618 pub fn add_text_from_buffer_range<T: ToOffset>(
619 &mut self,
620 range: Range<T>,
621 snapshot: &text::BufferSnapshot,
622 syntax_snapshot: &SyntaxSnapshot,
623 override_style: Option<HighlightStyle>,
624 syntax_theme: &SyntaxTheme,
625 ) {
626 let range = range.to_offset(snapshot);
627 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
628 let start = self.text.len();
629 self.text.push_str(chunk.text);
630 let end = self.text.len();
631
632 if let Some(mut highlight_style) = chunk
633 .syntax_highlight_id
634 .and_then(|id| id.style(syntax_theme))
635 {
636 if let Some(override_style) = override_style {
637 highlight_style.highlight(override_style);
638 }
639 self.highlights.push((start..end, highlight_style));
640 } else if let Some(override_style) = override_style {
641 self.highlights.push((start..end, override_style));
642 }
643 }
644 }
645
646 fn highlighted_chunks<'a>(
647 range: Range<usize>,
648 snapshot: &'a text::BufferSnapshot,
649 syntax_snapshot: &'a SyntaxSnapshot,
650 ) -> BufferChunks<'a> {
651 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
652 grammar.highlights_query.as_ref()
653 });
654
655 let highlight_maps = captures
656 .grammars()
657 .iter()
658 .map(|grammar| grammar.highlight_map())
659 .collect();
660
661 BufferChunks::new(
662 snapshot.as_rope(),
663 range,
664 Some((captures, highlight_maps)),
665 false,
666 None,
667 )
668 }
669}
670
671#[derive(Clone)]
672pub struct EditPreview {
673 old_snapshot: text::BufferSnapshot,
674 applied_edits_snapshot: text::BufferSnapshot,
675 syntax_snapshot: SyntaxSnapshot,
676}
677
678impl EditPreview {
679 pub fn highlight_edits(
680 &self,
681 current_snapshot: &BufferSnapshot,
682 edits: &[(Range<Anchor>, String)],
683 include_deletions: bool,
684 cx: &App,
685 ) -> HighlightedText {
686 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
687 return HighlightedText::default();
688 };
689
690 let mut highlighted_text = HighlightedTextBuilder::default();
691
692 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
693
694 let insertion_highlight_style = HighlightStyle {
695 background_color: Some(cx.theme().status().created_background),
696 ..Default::default()
697 };
698 let deletion_highlight_style = HighlightStyle {
699 background_color: Some(cx.theme().status().deleted_background),
700 ..Default::default()
701 };
702 let syntax_theme = cx.theme().syntax();
703
704 for (range, edit_text) in edits {
705 let edit_new_end_in_preview_snapshot = range
706 .end
707 .bias_right(&self.old_snapshot)
708 .to_offset(&self.applied_edits_snapshot);
709 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
710
711 let unchanged_range_in_preview_snapshot =
712 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
713 if !unchanged_range_in_preview_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 unchanged_range_in_preview_snapshot,
716 &self.applied_edits_snapshot,
717 &self.syntax_snapshot,
718 None,
719 &syntax_theme,
720 );
721 }
722
723 let range_in_current_snapshot = range.to_offset(current_snapshot);
724 if include_deletions && !range_in_current_snapshot.is_empty() {
725 highlighted_text.add_text_from_buffer_range(
726 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
729 Some(deletion_highlight_style),
730 &syntax_theme,
731 );
732 }
733
734 if !edit_text.is_empty() {
735 highlighted_text.add_text_from_buffer_range(
736 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
737 &self.applied_edits_snapshot,
738 &self.syntax_snapshot,
739 Some(insertion_highlight_style),
740 &syntax_theme,
741 );
742 }
743
744 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
745 }
746
747 highlighted_text.add_text_from_buffer_range(
748 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
749 &self.applied_edits_snapshot,
750 &self.syntax_snapshot,
751 None,
752 &syntax_theme,
753 );
754
755 highlighted_text.build()
756 }
757
758 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
759 let (first, _) = edits.first()?;
760 let (last, _) = edits.last()?;
761
762 let start = first
763 .start
764 .bias_left(&self.old_snapshot)
765 .to_point(&self.applied_edits_snapshot);
766 let end = last
767 .end
768 .bias_right(&self.old_snapshot)
769 .to_point(&self.applied_edits_snapshot);
770
771 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
772 let range = Point::new(start.row, 0)
773 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
774
775 Some(range.to_offset(&self.applied_edits_snapshot))
776 }
777}
778
779#[derive(Clone, Debug, PartialEq, Eq)]
780pub struct BracketMatch {
781 pub open_range: Range<usize>,
782 pub close_range: Range<usize>,
783 pub newline_only: bool,
784}
785
786impl Buffer {
787 /// Create a new buffer with the given base text.
788 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
789 Self::build(
790 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
791 None,
792 Capability::ReadWrite,
793 )
794 }
795
796 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
797 pub fn local_normalized(
798 base_text_normalized: Rope,
799 line_ending: LineEnding,
800 cx: &Context<Self>,
801 ) -> Self {
802 Self::build(
803 TextBuffer::new_normalized(
804 0,
805 cx.entity_id().as_non_zero_u64().into(),
806 line_ending,
807 base_text_normalized,
808 ),
809 None,
810 Capability::ReadWrite,
811 )
812 }
813
814 /// Create a new buffer that is a replica of a remote buffer.
815 pub fn remote(
816 remote_id: BufferId,
817 replica_id: ReplicaId,
818 capability: Capability,
819 base_text: impl Into<String>,
820 ) -> Self {
821 Self::build(
822 TextBuffer::new(replica_id, remote_id, base_text.into()),
823 None,
824 capability,
825 )
826 }
827
828 /// Create a new buffer that is a replica of a remote buffer, populating its
829 /// state from the given protobuf message.
830 pub fn from_proto(
831 replica_id: ReplicaId,
832 capability: Capability,
833 message: proto::BufferState,
834 file: Option<Arc<dyn File>>,
835 ) -> Result<Self> {
836 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
837 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
838 let mut this = Self::build(buffer, file, capability);
839 this.text.set_line_ending(proto::deserialize_line_ending(
840 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
841 ));
842 this.saved_version = proto::deserialize_version(&message.saved_version);
843 this.saved_mtime = message.saved_mtime.map(|time| time.into());
844 Ok(this)
845 }
846
847 /// Serialize the buffer's state to a protobuf message.
848 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
849 proto::BufferState {
850 id: self.remote_id().into(),
851 file: self.file.as_ref().map(|f| f.to_proto(cx)),
852 base_text: self.base_text().to_string(),
853 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
854 saved_version: proto::serialize_version(&self.saved_version),
855 saved_mtime: self.saved_mtime.map(|time| time.into()),
856 }
857 }
858
859 /// Serialize as protobufs all of the changes to the buffer since the given version.
860 pub fn serialize_ops(
861 &self,
862 since: Option<clock::Global>,
863 cx: &App,
864 ) -> Task<Vec<proto::Operation>> {
865 let mut operations = Vec::new();
866 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
867
868 operations.extend(self.remote_selections.iter().map(|(_, set)| {
869 proto::serialize_operation(&Operation::UpdateSelections {
870 selections: set.selections.clone(),
871 lamport_timestamp: set.lamport_timestamp,
872 line_mode: set.line_mode,
873 cursor_shape: set.cursor_shape,
874 })
875 }));
876
877 for (server_id, diagnostics) in &self.diagnostics {
878 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
879 lamport_timestamp: self.diagnostics_timestamp,
880 server_id: *server_id,
881 diagnostics: diagnostics.iter().cloned().collect(),
882 }));
883 }
884
885 for (server_id, completions) in &self.completion_triggers_per_language_server {
886 operations.push(proto::serialize_operation(
887 &Operation::UpdateCompletionTriggers {
888 triggers: completions.iter().cloned().collect(),
889 lamport_timestamp: self.completion_triggers_timestamp,
890 server_id: *server_id,
891 },
892 ));
893 }
894
895 let text_operations = self.text.operations().clone();
896 cx.background_spawn(async move {
897 let since = since.unwrap_or_default();
898 operations.extend(
899 text_operations
900 .iter()
901 .filter(|(_, op)| !since.observed(op.timestamp()))
902 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
903 );
904 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
905 operations
906 })
907 }
908
909 /// Assign a language to the buffer, returning the buffer.
910 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
911 self.set_language(Some(language), cx);
912 self
913 }
914
915 /// Returns the [`Capability`] of this buffer.
916 pub fn capability(&self) -> Capability {
917 self.capability
918 }
919
920 /// Whether this buffer can only be read.
921 pub fn read_only(&self) -> bool {
922 self.capability == Capability::ReadOnly
923 }
924
925 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
926 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
927 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
928 let snapshot = buffer.snapshot();
929 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
930 Self {
931 saved_mtime,
932 saved_version: buffer.version(),
933 preview_version: buffer.version(),
934 reload_task: None,
935 transaction_depth: 0,
936 was_dirty_before_starting_transaction: None,
937 has_unsaved_edits: Cell::new((buffer.version(), false)),
938 text: buffer,
939 branch_state: None,
940 file,
941 capability,
942 syntax_map,
943 reparse: None,
944 non_text_state_update_count: 0,
945 sync_parse_timeout: Duration::from_millis(1),
946 parse_status: async_watch::channel(ParseStatus::Idle),
947 autoindent_requests: Default::default(),
948 pending_autoindent: Default::default(),
949 language: None,
950 remote_selections: Default::default(),
951 diagnostics: Default::default(),
952 diagnostics_timestamp: Default::default(),
953 completion_triggers: Default::default(),
954 completion_triggers_per_language_server: Default::default(),
955 completion_triggers_timestamp: Default::default(),
956 deferred_ops: OperationQueue::new(),
957 has_conflict: false,
958 change_bits: Default::default(),
959 _subscriptions: Vec::new(),
960 }
961 }
962
963 pub fn build_snapshot(
964 text: Rope,
965 language: Option<Arc<Language>>,
966 language_registry: Option<Arc<LanguageRegistry>>,
967 cx: &mut App,
968 ) -> impl Future<Output = BufferSnapshot> + use<> {
969 let entity_id = cx.reserve_entity::<Self>().entity_id();
970 let buffer_id = entity_id.as_non_zero_u64().into();
971 async move {
972 let text =
973 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
974 let mut syntax = SyntaxMap::new(&text).snapshot();
975 if let Some(language) = language.clone() {
976 let text = text.clone();
977 let language = language.clone();
978 let language_registry = language_registry.clone();
979 syntax.reparse(&text, language_registry, language);
980 }
981 BufferSnapshot {
982 text,
983 syntax,
984 file: None,
985 diagnostics: Default::default(),
986 remote_selections: Default::default(),
987 language,
988 non_text_state_update_count: 0,
989 }
990 }
991 }
992
993 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
994 let entity_id = cx.reserve_entity::<Self>().entity_id();
995 let buffer_id = entity_id.as_non_zero_u64().into();
996 let text =
997 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
998 let syntax = SyntaxMap::new(&text).snapshot();
999 BufferSnapshot {
1000 text,
1001 syntax,
1002 file: None,
1003 diagnostics: Default::default(),
1004 remote_selections: Default::default(),
1005 language: None,
1006 non_text_state_update_count: 0,
1007 }
1008 }
1009
1010 #[cfg(any(test, feature = "test-support"))]
1011 pub fn build_snapshot_sync(
1012 text: Rope,
1013 language: Option<Arc<Language>>,
1014 language_registry: Option<Arc<LanguageRegistry>>,
1015 cx: &mut App,
1016 ) -> BufferSnapshot {
1017 let entity_id = cx.reserve_entity::<Self>().entity_id();
1018 let buffer_id = entity_id.as_non_zero_u64().into();
1019 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1020 let mut syntax = SyntaxMap::new(&text).snapshot();
1021 if let Some(language) = language.clone() {
1022 let text = text.clone();
1023 let language = language.clone();
1024 let language_registry = language_registry.clone();
1025 syntax.reparse(&text, language_registry, language);
1026 }
1027 BufferSnapshot {
1028 text,
1029 syntax,
1030 file: None,
1031 diagnostics: Default::default(),
1032 remote_selections: Default::default(),
1033 language,
1034 non_text_state_update_count: 0,
1035 }
1036 }
1037
1038 /// Retrieve a snapshot of the buffer's current state. This is computationally
1039 /// cheap, and allows reading from the buffer on a background thread.
1040 pub fn snapshot(&self) -> BufferSnapshot {
1041 let text = self.text.snapshot();
1042 let mut syntax_map = self.syntax_map.lock();
1043 syntax_map.interpolate(&text);
1044 let syntax = syntax_map.snapshot();
1045
1046 BufferSnapshot {
1047 text,
1048 syntax,
1049 file: self.file.clone(),
1050 remote_selections: self.remote_selections.clone(),
1051 diagnostics: self.diagnostics.clone(),
1052 language: self.language.clone(),
1053 non_text_state_update_count: self.non_text_state_update_count,
1054 }
1055 }
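    // Typical usage sketch (assumes a gpui context `cx`; `expensive_analysis` is a
    // hypothetical stand-in for any read-only work): take the snapshot on the main
    // thread and move it into a background task, e.g.
    //     let snapshot = buffer.snapshot();
    //     let task = cx.background_spawn(async move { expensive_analysis(&snapshot) });
    // The snapshot is an immutable value, so the background task needs no further
    // access to the `Buffer` entity.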
1056
1057 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1058 let this = cx.entity();
1059 cx.new(|cx| {
1060 let mut branch = Self {
1061 branch_state: Some(BufferBranchState {
1062 base_buffer: this.clone(),
1063 merged_operations: Default::default(),
1064 }),
1065 language: self.language.clone(),
1066 has_conflict: self.has_conflict,
1067 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1068 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1069 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1070 };
1071 if let Some(language_registry) = self.language_registry() {
1072 branch.set_language_registry(language_registry);
1073 }
1074
1075 // Reparse the branch buffer so that we get syntax highlighting immediately.
1076 branch.reparse(cx);
1077
1078 branch
1079 })
1080 }
1081
1082 pub fn preview_edits(
1083 &self,
1084 edits: Arc<[(Range<Anchor>, String)]>,
1085 cx: &App,
1086 ) -> Task<EditPreview> {
1087 let registry = self.language_registry();
1088 let language = self.language().cloned();
1089 let old_snapshot = self.text.snapshot();
1090 let mut branch_buffer = self.text.branch();
1091 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1092 cx.background_spawn(async move {
1093 if !edits.is_empty() {
1094 if let Some(language) = language.clone() {
1095 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1096 }
1097
1098 branch_buffer.edit(edits.iter().cloned());
1099 let snapshot = branch_buffer.snapshot();
1100 syntax_snapshot.interpolate(&snapshot);
1101
1102 if let Some(language) = language {
1103 syntax_snapshot.reparse(&snapshot, registry, language);
1104 }
1105 }
1106 EditPreview {
1107 old_snapshot,
1108 applied_edits_snapshot: branch_buffer.snapshot(),
1109 syntax_snapshot,
1110 }
1111 })
1112 }
1113
1114 /// Applies all of the changes in this buffer that intersect any of the
1115 /// given `ranges` to its base buffer.
1116 ///
1117 /// If `ranges` is empty, then all changes will be applied. This buffer must
1118 /// be a branch buffer to call this method.
1119 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1120 let Some(base_buffer) = self.base_buffer() else {
1121 debug_panic!("not a branch buffer");
1122 return;
1123 };
1124
1125 let mut ranges = if ranges.is_empty() {
1126 &[0..usize::MAX]
1127 } else {
1128 ranges.as_slice()
1129 }
1130 .into_iter()
1131 .peekable();
1132
1133 let mut edits = Vec::new();
1134 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1135 let mut is_included = false;
1136 while let Some(range) = ranges.peek() {
1137 if range.end < edit.new.start {
1138 ranges.next().unwrap();
1139 } else {
1140 if range.start <= edit.new.end {
1141 is_included = true;
1142 }
1143 break;
1144 }
1145 }
1146
1147 if is_included {
1148 edits.push((
1149 edit.old.clone(),
1150 self.text_for_range(edit.new.clone()).collect::<String>(),
1151 ));
1152 }
1153 }
1154
1155 let operation = base_buffer.update(cx, |base_buffer, cx| {
1156 // cx.emit(BufferEvent::DiffBaseChanged);
1157 base_buffer.edit(edits, None, cx)
1158 });
1159
1160 if let Some(operation) = operation {
1161 if let Some(BufferBranchState {
1162 merged_operations, ..
1163 }) = &mut self.branch_state
1164 {
1165 merged_operations.push(operation);
1166 }
1167 }
1168 }
1169
1170 fn on_base_buffer_event(
1171 &mut self,
1172 _: Entity<Buffer>,
1173 event: &BufferEvent,
1174 cx: &mut Context<Self>,
1175 ) {
1176 let BufferEvent::Operation { operation, .. } = event else {
1177 return;
1178 };
1179 let Some(BufferBranchState {
1180 merged_operations, ..
1181 }) = &mut self.branch_state
1182 else {
1183 return;
1184 };
1185
1186 let mut operation_to_undo = None;
1187 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1188 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1189 merged_operations.remove(ix);
1190 operation_to_undo = Some(operation.timestamp);
1191 }
1192 }
1193
1194 self.apply_ops([operation.clone()], cx);
1195
1196 if let Some(timestamp) = operation_to_undo {
1197 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1198 self.undo_operations(counts, cx);
1199 }
1200 }
1201
1202 #[cfg(test)]
1203 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1204 &self.text
1205 }
1206
1207 /// Retrieve a snapshot of the buffer's raw text, without any
1208 /// language-related state like the syntax tree or diagnostics.
1209 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1210 self.text.snapshot()
1211 }
1212
1213 /// The file associated with the buffer, if any.
1214 pub fn file(&self) -> Option<&Arc<dyn File>> {
1215 self.file.as_ref()
1216 }
1217
1218 /// The version of the buffer that was last saved or reloaded from disk.
1219 pub fn saved_version(&self) -> &clock::Global {
1220 &self.saved_version
1221 }
1222
1223 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1224 pub fn saved_mtime(&self) -> Option<MTime> {
1225 self.saved_mtime
1226 }
1227
1228 /// Assign a language to the buffer.
1229 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1230 self.non_text_state_update_count += 1;
1231 self.syntax_map.lock().clear(&self.text);
1232 self.language = language;
1233 self.was_changed();
1234 self.reparse(cx);
1235 cx.emit(BufferEvent::LanguageChanged);
1236 }
1237
1238 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1239 /// other languages if parts of the buffer are written in different languages.
1240 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1241 self.syntax_map
1242 .lock()
1243 .set_language_registry(language_registry);
1244 }
1245
1246 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1247 self.syntax_map.lock().language_registry()
1248 }
1249
1250 /// Assign the buffer a new [`Capability`].
1251 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1252 self.capability = capability;
1253 cx.emit(BufferEvent::CapabilityChanged)
1254 }
1255
1256 /// This method is called to signal that the buffer has been saved.
1257 pub fn did_save(
1258 &mut self,
1259 version: clock::Global,
1260 mtime: Option<MTime>,
1261 cx: &mut Context<Self>,
1262 ) {
1263 self.saved_version = version;
1264 self.has_unsaved_edits
1265 .set((self.saved_version().clone(), false));
1266 self.has_conflict = false;
1267 self.saved_mtime = mtime;
1268 self.was_changed();
1269 cx.emit(BufferEvent::Saved);
1270 cx.notify();
1271 }
1272
1273 /// This method is called to signal that the buffer has been discarded.
1274 pub fn discarded(&self, cx: &mut Context<Self>) {
1275 cx.emit(BufferEvent::Discarded);
1276 cx.notify();
1277 }
1278
1279 /// Reloads the contents of the buffer from disk.
1280 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1281 let (tx, rx) = futures::channel::oneshot::channel();
1282 let prev_version = self.text.version();
1283 self.reload_task = Some(cx.spawn(async move |this, cx| {
1284 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1285 let file = this.file.as_ref()?.as_local()?;
1286
1287 Some((file.disk_state().mtime(), file.load(cx)))
1288 })?
1289 else {
1290 return Ok(());
1291 };
1292
1293 let new_text = new_text.await?;
1294 let diff = this
1295 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1296 .await;
1297 this.update(cx, |this, cx| {
1298 if this.version() == diff.base_version {
1299 this.finalize_last_transaction();
1300 this.apply_diff(diff, cx);
1301 tx.send(this.finalize_last_transaction().cloned()).ok();
1302 this.has_conflict = false;
1303 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1304 } else {
1305 if !diff.edits.is_empty()
1306 || this
1307 .edits_since::<usize>(&diff.base_version)
1308 .next()
1309 .is_some()
1310 {
1311 this.has_conflict = true;
1312 }
1313
1314 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1315 }
1316
1317 this.reload_task.take();
1318 })
1319 }));
1320 rx
1321 }
1322
1323 /// This method is called to signal that the buffer has been reloaded.
1324 pub fn did_reload(
1325 &mut self,
1326 version: clock::Global,
1327 line_ending: LineEnding,
1328 mtime: Option<MTime>,
1329 cx: &mut Context<Self>,
1330 ) {
1331 self.saved_version = version;
1332 self.has_unsaved_edits
1333 .set((self.saved_version.clone(), false));
1334 self.text.set_line_ending(line_ending);
1335 self.saved_mtime = mtime;
1336 cx.emit(BufferEvent::Reloaded);
1337 cx.notify();
1338 }
1339
1340 /// Updates the [`File`] backing this buffer. This should be called when
1341 /// the file has changed or has been deleted.
1342 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1343 let was_dirty = self.is_dirty();
1344 let mut file_changed = false;
1345
1346 if let Some(old_file) = self.file.as_ref() {
1347 if new_file.path() != old_file.path() {
1348 file_changed = true;
1349 }
1350
1351 let old_state = old_file.disk_state();
1352 let new_state = new_file.disk_state();
1353 if old_state != new_state {
1354 file_changed = true;
1355 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1356 cx.emit(BufferEvent::ReloadNeeded)
1357 }
1358 }
1359 } else {
1360 file_changed = true;
1361 };
1362
1363 self.file = Some(new_file);
1364 if file_changed {
1365 self.was_changed();
1366 self.non_text_state_update_count += 1;
1367 if was_dirty != self.is_dirty() {
1368 cx.emit(BufferEvent::DirtyChanged);
1369 }
1370 cx.emit(BufferEvent::FileHandleChanged);
1371 cx.notify();
1372 }
1373 }
1374
1375 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1376 Some(self.branch_state.as_ref()?.base_buffer.clone())
1377 }
1378
1379 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1380 pub fn language(&self) -> Option<&Arc<Language>> {
1381 self.language.as_ref()
1382 }
1383
1384 /// Returns the [`Language`] at the given location.
1385 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1386 let offset = position.to_offset(self);
1387 self.syntax_map
1388 .lock()
1389 .layers_for_range(offset..offset, &self.text, false)
1390 .last()
1391 .map(|info| info.language.clone())
1392 .or_else(|| self.language.clone())
1393 }
1394
1395 /// Returns each [`Language`] for the active syntax layers at the given location.
1396 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1397 let offset = position.to_offset(self);
1398 let mut languages: Vec<Arc<Language>> = self
1399 .syntax_map
1400 .lock()
1401 .layers_for_range(offset..offset, &self.text, false)
1402 .map(|info| info.language.clone())
1403 .collect();
1404
1405 if languages.is_empty() {
1406 if let Some(buffer_language) = self.language() {
1407 languages.push(buffer_language.clone());
1408 }
1409 }
1410
1411 languages
1412 }
1413
1414 /// An integer version number that accounts for all updates besides
1415 /// the buffer's text itself (which is versioned via a version vector).
1416 pub fn non_text_state_update_count(&self) -> usize {
1417 self.non_text_state_update_count
1418 }
1419
1420 /// Whether the buffer is being parsed in the background.
1421 #[cfg(any(test, feature = "test-support"))]
1422 pub fn is_parsing(&self) -> bool {
1423 self.reparse.is_some()
1424 }
1425
1426 /// Indicates whether the buffer contains any regions that may be
1427 /// written in a language that hasn't been loaded yet.
1428 pub fn contains_unknown_injections(&self) -> bool {
1429 self.syntax_map.lock().contains_unknown_injections()
1430 }
1431
1432 #[cfg(test)]
1433 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1434 self.sync_parse_timeout = timeout;
1435 }
1436
1437 /// Called after an edit to synchronize the buffer's main parse tree with
1438 /// the buffer's new underlying state.
1439 ///
1440 /// Locks the syntax map and interpolates the edits since the last reparse
1441 /// into the foreground syntax tree.
1442 ///
1443 /// Then takes a stable snapshot of the syntax map before unlocking it.
1444 /// The snapshot with the interpolated edits is sent to a background thread,
1445 /// where we ask Tree-sitter to perform an incremental parse.
1446 ///
1447 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1448 /// waiting on the parse to complete. As soon as it completes, we proceed
1449 /// synchronously, unless a 1ms timeout elapses.
1450 ///
1451 /// If we time out waiting on the parse, we spawn a second task waiting
1452 /// until the parse does complete and return with the interpolated tree still
1453 /// in the foreground. When the background parse completes, call back into
1454 /// the main thread and assign the foreground parse state.
1455 ///
1456 /// If the buffer or grammar changed since the start of the background parse,
1457 /// initiate an additional reparse recursively. To avoid concurrent parses
1458 /// for the same buffer, we only initiate a new parse if we are not already
1459 /// parsing in the background.
1460 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1461 if self.reparse.is_some() {
1462 return;
1463 }
1464 let language = if let Some(language) = self.language.clone() {
1465 language
1466 } else {
1467 return;
1468 };
1469
1470 let text = self.text_snapshot();
1471 let parsed_version = self.version();
1472
1473 let mut syntax_map = self.syntax_map.lock();
1474 syntax_map.interpolate(&text);
1475 let language_registry = syntax_map.language_registry();
1476 let mut syntax_snapshot = syntax_map.snapshot();
1477 drop(syntax_map);
1478
1479 let parse_task = cx.background_spawn({
1480 let language = language.clone();
1481 let language_registry = language_registry.clone();
1482 async move {
1483 syntax_snapshot.reparse(&text, language_registry, language);
1484 syntax_snapshot
1485 }
1486 });
1487
1488 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1489 match cx
1490 .background_executor()
1491 .block_with_timeout(self.sync_parse_timeout, parse_task)
1492 {
1493 Ok(new_syntax_snapshot) => {
1494 self.did_finish_parsing(new_syntax_snapshot, cx);
1495 self.reparse = None;
1496 }
1497 Err(parse_task) => {
1498 self.reparse = Some(cx.spawn(async move |this, cx| {
1499 let new_syntax_map = parse_task.await;
1500 this.update(cx, move |this, cx| {
1501 let grammar_changed =
1502 this.language.as_ref().map_or(true, |current_language| {
1503 !Arc::ptr_eq(&language, current_language)
1504 });
1505 let language_registry_changed = new_syntax_map
1506 .contains_unknown_injections()
1507 && language_registry.map_or(false, |registry| {
1508 registry.version() != new_syntax_map.language_registry_version()
1509 });
1510 let parse_again = language_registry_changed
1511 || grammar_changed
1512 || this.version.changed_since(&parsed_version);
1513 this.did_finish_parsing(new_syntax_map, cx);
1514 this.reparse = None;
1515 if parse_again {
1516 this.reparse(cx);
1517 }
1518 })
1519 .ok();
1520 }));
1521 }
1522 }
1523 }
1524
1525 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1526 self.was_changed();
1527 self.non_text_state_update_count += 1;
1528 self.syntax_map.lock().did_parse(syntax_snapshot);
1529 self.request_autoindent(cx);
1530 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1531 cx.emit(BufferEvent::Reparsed);
1532 cx.notify();
1533 }
1534
1535 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1536 self.parse_status.1.clone()
1537 }
1538
1539 /// Assign to the buffer a set of diagnostics created by a given language server.
1540 pub fn update_diagnostics(
1541 &mut self,
1542 server_id: LanguageServerId,
1543 diagnostics: DiagnosticSet,
1544 cx: &mut Context<Self>,
1545 ) {
1546 let lamport_timestamp = self.text.lamport_clock.tick();
1547 let op = Operation::UpdateDiagnostics {
1548 server_id,
1549 diagnostics: diagnostics.iter().cloned().collect(),
1550 lamport_timestamp,
1551 };
1552 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1553 self.send_operation(op, true, cx);
1554 }
1555
1556 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1557 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1558 return None;
1559 };
1560 Some(&self.diagnostics[idx].1)
1561 }
1562
1563 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1564 if let Some(indent_sizes) = self.compute_autoindents() {
1565 let indent_sizes = cx.background_spawn(indent_sizes);
1566 match cx
1567 .background_executor()
1568 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1569 {
1570 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1571 Err(indent_sizes) => {
1572 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1573 let indent_sizes = indent_sizes.await;
1574 this.update(cx, |this, cx| {
1575 this.apply_autoindents(indent_sizes, cx);
1576 })
1577 .ok();
1578 }));
1579 }
1580 }
1581 } else {
1582 self.autoindent_requests.clear();
1583 }
1584 }
1585
1586 fn compute_autoindents(
1587 &self,
1588 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1589 let max_rows_between_yields = 100;
1590 let snapshot = self.snapshot();
1591 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1592 return None;
1593 }
1594
1595 let autoindent_requests = self.autoindent_requests.clone();
1596 Some(async move {
1597 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1598 for request in autoindent_requests {
1599 // Resolve each edited range to its row in the current buffer and in the
1600 // buffer before this batch of edits.
1601 let mut row_ranges = Vec::new();
1602 let mut old_to_new_rows = BTreeMap::new();
1603 let mut language_indent_sizes_by_new_row = Vec::new();
1604 for entry in &request.entries {
1605 let position = entry.range.start;
1606 let new_row = position.to_point(&snapshot).row;
1607 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1608 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1609
1610 if !entry.first_line_is_new {
1611 let old_row = position.to_point(&request.before_edit).row;
1612 old_to_new_rows.insert(old_row, new_row);
1613 }
1614 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1615 }
1616
1617 // Build a map containing the suggested indentation for each of the edited lines
1618 // with respect to the state of the buffer before these edits. This map is keyed
1619 // by the rows for these lines in the current state of the buffer.
1620 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1621 let old_edited_ranges =
1622 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1623 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1624 let mut language_indent_size = IndentSize::default();
1625 for old_edited_range in old_edited_ranges {
1626 let suggestions = request
1627 .before_edit
1628 .suggest_autoindents(old_edited_range.clone())
1629 .into_iter()
1630 .flatten();
1631 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1632 if let Some(suggestion) = suggestion {
1633 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1634
1635 // Find the indent size based on the language for this row.
1636 while let Some((row, size)) = language_indent_sizes.peek() {
1637 if *row > new_row {
1638 break;
1639 }
1640 language_indent_size = *size;
1641 language_indent_sizes.next();
1642 }
1643
1644 let suggested_indent = old_to_new_rows
1645 .get(&suggestion.basis_row)
1646 .and_then(|from_row| {
1647 Some(old_suggestions.get(from_row).copied()?.0)
1648 })
1649 .unwrap_or_else(|| {
1650 request
1651 .before_edit
1652 .indent_size_for_line(suggestion.basis_row)
1653 })
1654 .with_delta(suggestion.delta, language_indent_size);
1655 old_suggestions
1656 .insert(new_row, (suggested_indent, suggestion.within_error));
1657 }
1658 }
1659 yield_now().await;
1660 }
1661
1662 // Compute new suggestions for each line, but only include them in the result
1663 // if they differ from the old suggestion for that line.
1664 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1665 let mut language_indent_size = IndentSize::default();
1666 for (row_range, original_indent_column) in row_ranges {
1667 let new_edited_row_range = if request.is_block_mode {
1668 row_range.start..row_range.start + 1
1669 } else {
1670 row_range.clone()
1671 };
1672
1673 let suggestions = snapshot
1674 .suggest_autoindents(new_edited_row_range.clone())
1675 .into_iter()
1676 .flatten();
1677 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1678 if let Some(suggestion) = suggestion {
1679 // Find the indent size based on the language for this row.
1680 while let Some((row, size)) = language_indent_sizes.peek() {
1681 if *row > new_row {
1682 break;
1683 }
1684 language_indent_size = *size;
1685 language_indent_sizes.next();
1686 }
1687
1688 let suggested_indent = indent_sizes
1689 .get(&suggestion.basis_row)
1690 .copied()
1691 .map(|e| e.0)
1692 .unwrap_or_else(|| {
1693 snapshot.indent_size_for_line(suggestion.basis_row)
1694 })
1695 .with_delta(suggestion.delta, language_indent_size);
1696
1697 if old_suggestions.get(&new_row).map_or(
1698 true,
1699 |(old_indentation, was_within_error)| {
1700 suggested_indent != *old_indentation
1701 && (!suggestion.within_error || *was_within_error)
1702 },
1703 ) {
1704 indent_sizes.insert(
1705 new_row,
1706 (suggested_indent, request.ignore_empty_lines),
1707 );
1708 }
1709 }
1710 }
1711
1712 if let (true, Some(original_indent_column)) =
1713 (request.is_block_mode, original_indent_column)
1714 {
1715 let new_indent =
1716 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1717 *indent
1718 } else {
1719 snapshot.indent_size_for_line(row_range.start)
1720 };
1721 let delta = new_indent.len as i64 - original_indent_column as i64;
1722 if delta != 0 {
1723 for row in row_range.skip(1) {
1724 indent_sizes.entry(row).or_insert_with(|| {
1725 let mut size = snapshot.indent_size_for_line(row);
1726 if size.kind == new_indent.kind {
1727 match delta.cmp(&0) {
1728 Ordering::Greater => size.len += delta as u32,
1729 Ordering::Less => {
1730 size.len = size.len.saturating_sub(-delta as u32)
1731 }
1732 Ordering::Equal => {}
1733 }
1734 }
1735 (size, request.ignore_empty_lines)
1736 });
1737 }
1738 }
1739 }
1740
1741 yield_now().await;
1742 }
1743 }
1744
1745 indent_sizes
1746 .into_iter()
1747 .filter_map(|(row, (indent, ignore_empty_lines))| {
1748 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1749 None
1750 } else {
1751 Some((row, indent))
1752 }
1753 })
1754 .collect()
1755 })
1756 }
1757
1758 fn apply_autoindents(
1759 &mut self,
1760 indent_sizes: BTreeMap<u32, IndentSize>,
1761 cx: &mut Context<Self>,
1762 ) {
1763 self.autoindent_requests.clear();
1764
1765 let edits: Vec<_> = indent_sizes
1766 .into_iter()
1767 .filter_map(|(row, indent_size)| {
1768 let current_size = indent_size_for_line(self, row);
1769 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1770 })
1771 .collect();
1772
1773 let preserve_preview = self.preserve_preview();
1774 self.edit(edits, None, cx);
1775 if preserve_preview {
1776 self.refresh_preview();
1777 }
1778 }
1779
1780 /// Create a minimal edit that will cause the given row to be indented
1781 /// with the given size. After applying this edit, the length of the line
1782 /// will always be at least `new_size.len`.
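///
/// A minimal illustration (marked `ignore`, so it is a sketch rather than a
/// compiled doctest): growing a two-space indent to four spaces on row 3
/// produces an insertion of two spaces at the start of that row.
///
/// ```ignore
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// // => Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// ```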
1783 pub fn edit_for_indent_size_adjustment(
1784 row: u32,
1785 current_size: IndentSize,
1786 new_size: IndentSize,
1787 ) -> Option<(Range<Point>, String)> {
1788 if new_size.kind == current_size.kind {
1789 match new_size.len.cmp(&current_size.len) {
1790 Ordering::Greater => {
1791 let point = Point::new(row, 0);
1792 Some((
1793 point..point,
1794 iter::repeat(new_size.char())
1795 .take((new_size.len - current_size.len) as usize)
1796 .collect::<String>(),
1797 ))
1798 }
1799
1800 Ordering::Less => Some((
1801 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1802 String::new(),
1803 )),
1804
1805 Ordering::Equal => None,
1806 }
1807 } else {
1808 Some((
1809 Point::new(row, 0)..Point::new(row, current_size.len),
1810 iter::repeat(new_size.char())
1811 .take(new_size.len as usize)
1812 .collect::<String>(),
1813 ))
1814 }
1815 }
1816
1817 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1818 /// and the given new text.
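///
/// A hedged sketch of the intended flow (marked `ignore`; it assumes an async
/// context in which the returned [`Task`] can be awaited and the buffer can be
/// updated afterwards):
///
/// ```ignore
/// let diff = buffer.diff(new_text, cx).await;
/// // Apply the diff back on the buffer, skipping hunks that now conflict
/// // with any edits made while the diff was being computed.
/// let _transaction = buffer.apply_diff(diff, cx);
/// ```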
1819 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1820 let old_text = self.as_rope().clone();
1821 let base_version = self.version();
1822 cx.background_executor()
1823 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1824 let old_text = old_text.to_string();
1825 let line_ending = LineEnding::detect(&new_text);
1826 LineEnding::normalize(&mut new_text);
1827 let edits = text_diff(&old_text, &new_text);
1828 Diff {
1829 base_version,
1830 line_ending,
1831 edits,
1832 }
1833 })
1834 }
1835
1836 /// Spawns a background task that searches the buffer for any whitespace
1837 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1838 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1839 let old_text = self.as_rope().clone();
1840 let line_ending = self.line_ending();
1841 let base_version = self.version();
1842 cx.background_spawn(async move {
1843 let ranges = trailing_whitespace_ranges(&old_text);
1844 let empty = Arc::<str>::from("");
1845 Diff {
1846 base_version,
1847 line_ending,
1848 edits: ranges
1849 .into_iter()
1850 .map(|range| (range, empty.clone()))
1851 .collect(),
1852 }
1853 })
1854 }
1855
1856 /// Ensures that the buffer ends with a single newline character, and
1857 /// no other whitespace.
1858 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1859 let len = self.len();
1860 let mut offset = len;
1861 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1862 let non_whitespace_len = chunk
1863 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1864 .len();
1865 offset -= chunk.len();
1866 offset += non_whitespace_len;
1867 if non_whitespace_len != 0 {
1868 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1869 return;
1870 }
1871 break;
1872 }
1873 }
1874 self.edit([(offset..len, "\n")], None, cx);
1875 }
1876
1877 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1878 /// calculated, the diff is adjusted to account for those changes, and any parts of
1879 /// the diff that conflict with those changes are discarded.
1880 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1881 let snapshot = self.snapshot();
1882 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1883 let mut delta = 0;
1884 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1885 while let Some(edit_since) = edits_since.peek() {
1886 // If the edit occurs after a diff hunk, then it does not
1887 // affect that hunk.
1888 if edit_since.old.start > range.end {
1889 break;
1890 }
1891 // If the edit precedes the diff hunk, then adjust the hunk
1892 // to reflect the edit.
1893 else if edit_since.old.end < range.start {
1894 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1895 edits_since.next();
1896 }
1897 // If the edit intersects a diff hunk, then discard that hunk.
1898 else {
1899 return None;
1900 }
1901 }
1902
1903 let start = (range.start as i64 + delta) as usize;
1904 let end = (range.end as i64 + delta) as usize;
1905 Some((start..end, new_text))
1906 });
1907
1908 self.start_transaction();
1909 self.text.set_line_ending(diff.line_ending);
1910 self.edit(adjusted_edits, None, cx);
1911 self.end_transaction(cx)
1912 }
1913
1914 fn has_unsaved_edits(&self) -> bool {
1915 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1916
1917 if last_version == self.version {
1918 self.has_unsaved_edits
1919 .set((last_version, has_unsaved_edits));
1920 return has_unsaved_edits;
1921 }
1922
1923 let has_edits = self.has_edits_since(&self.saved_version);
1924 self.has_unsaved_edits
1925 .set((self.version.clone(), has_edits));
1926 has_edits
1927 }
1928
1929 /// Checks if the buffer has unsaved changes.
1930 pub fn is_dirty(&self) -> bool {
1931 if self.capability == Capability::ReadOnly {
1932 return false;
1933 }
1934 if self.has_conflict {
1935 return true;
1936 }
1937 match self.file.as_ref().map(|f| f.disk_state()) {
1938 Some(DiskState::New) | Some(DiskState::Deleted) => {
1939 !self.is_empty() && self.has_unsaved_edits()
1940 }
1941 _ => self.has_unsaved_edits(),
1942 }
1943 }
1944
1945 /// Checks if the buffer and its file have both changed since the buffer
1946 /// was last saved or reloaded.
1947 pub fn has_conflict(&self) -> bool {
1948 if self.has_conflict {
1949 return true;
1950 }
1951 let Some(file) = self.file.as_ref() else {
1952 return false;
1953 };
1954 match file.disk_state() {
1955 DiskState::New => false,
1956 DiskState::Present { mtime } => match self.saved_mtime {
1957 Some(saved_mtime) => {
1958 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1959 }
1960 None => true,
1961 },
1962 DiskState::Deleted => false,
1963 }
1964 }
1965
1966 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1967 pub fn subscribe(&mut self) -> Subscription {
1968 self.text.subscribe()
1969 }
1970
1971 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1972 ///
1973 /// This allows downstream code to check if the buffer's text has changed without
1974 /// waiting for an effect cycle, which would be required if using events.
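///
/// A small sketch (marked `ignore`; assumes `buffer` and `cx` are in scope):
/// register a flag, edit, then poll the flag.
///
/// ```ignore
/// let changed = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&changed));
/// buffer.edit([(0..0, "x")], None, cx);
/// assert!(changed.get());
/// ```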
1975 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1976 if let Err(ix) = self
1977 .change_bits
1978 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1979 {
1980 self.change_bits.insert(ix, bit);
1981 }
1982 }
1983
1984 fn was_changed(&mut self) {
1985 self.change_bits.retain(|change_bit| {
1986 change_bit.upgrade().map_or(false, |bit| {
1987 bit.replace(true);
1988 true
1989 })
1990 });
1991 }
1992
1993 /// Starts a transaction, if one is not already in-progress. When undoing or
1994 /// redoing edits, all of the edits performed within a transaction are undone
1995 /// or redone together.
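///
/// A sketch (marked `ignore`) of grouping two edits so that a single undo
/// reverts both:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "fn main() {\n")], None, cx);
/// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // reverts both edits together
/// ```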
1996 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1997 self.start_transaction_at(Instant::now())
1998 }
1999
2000 /// Starts a transaction, providing the current time. Subsequent transactions
2001 /// that occur within a short period of time will be grouped together. This
2002 /// is controlled by the buffer's undo grouping duration.
2003 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2004 self.transaction_depth += 1;
2005 if self.was_dirty_before_starting_transaction.is_none() {
2006 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2007 }
2008 self.text.start_transaction_at(now)
2009 }
2010
2011 /// Terminates the current transaction, if this is the outermost transaction.
2012 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2013 self.end_transaction_at(Instant::now(), cx)
2014 }
2015
2016 /// Terminates the current transaction, providing the current time. Subsequent transactions
2017 /// that occur within a short period of time will be grouped together. This
2018 /// is controlled by the buffer's undo grouping duration.
2019 pub fn end_transaction_at(
2020 &mut self,
2021 now: Instant,
2022 cx: &mut Context<Self>,
2023 ) -> Option<TransactionId> {
2024 assert!(self.transaction_depth > 0);
2025 self.transaction_depth -= 1;
2026 let was_dirty = if self.transaction_depth == 0 {
2027 self.was_dirty_before_starting_transaction.take().unwrap()
2028 } else {
2029 false
2030 };
2031 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2032 self.did_edit(&start_version, was_dirty, cx);
2033 Some(transaction_id)
2034 } else {
2035 None
2036 }
2037 }
2038
2039 /// Manually add a transaction to the buffer's undo history.
2040 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2041 self.text.push_transaction(transaction, now);
2042 }
2043
2044 /// Prevent the last transaction from being grouped with any subsequent transactions,
2045 /// even if they occur within the buffer's undo grouping duration.
2046 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2047 self.text.finalize_last_transaction()
2048 }
2049
2050 /// Manually group all changes since a given transaction.
2051 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2052 self.text.group_until_transaction(transaction_id);
2053 }
2054
2055 /// Manually remove a transaction from the buffer's undo history.
2056 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2057 self.text.forget_transaction(transaction_id)
2058 }
2059
2060 /// Retrieve a transaction from the buffer's undo history.
2061 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2062 self.text.get_transaction(transaction_id)
2063 }
2064
2065 /// Manually merge two transactions in the buffer's undo history.
2066 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2067 self.text.merge_transactions(transaction, destination);
2068 }
2069
2070 /// Waits for the buffer to receive operations with the given timestamps.
2071 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2072 &mut self,
2073 edit_ids: It,
2074 ) -> impl Future<Output = Result<()>> + use<It> {
2075 self.text.wait_for_edits(edit_ids)
2076 }
2077
2078 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2079 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2080 &mut self,
2081 anchors: It,
2082 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2083 self.text.wait_for_anchors(anchors)
2084 }
2085
2086 /// Waits for the buffer to receive operations up to the given version.
2087 pub fn wait_for_version(
2088 &mut self,
2089 version: clock::Global,
2090 ) -> impl Future<Output = Result<()>> + use<> {
2091 self.text.wait_for_version(version)
2092 }
2093
2094 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2095 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2096 pub fn give_up_waiting(&mut self) {
2097 self.text.give_up_waiting();
2098 }
2099
2100 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2101 pub fn set_active_selections(
2102 &mut self,
2103 selections: Arc<[Selection<Anchor>]>,
2104 line_mode: bool,
2105 cursor_shape: CursorShape,
2106 cx: &mut Context<Self>,
2107 ) {
2108 let lamport_timestamp = self.text.lamport_clock.tick();
2109 self.remote_selections.insert(
2110 self.text.replica_id(),
2111 SelectionSet {
2112 selections: selections.clone(),
2113 lamport_timestamp,
2114 line_mode,
2115 cursor_shape,
2116 },
2117 );
2118 self.send_operation(
2119 Operation::UpdateSelections {
2120 selections,
2121 line_mode,
2122 lamport_timestamp,
2123 cursor_shape,
2124 },
2125 true,
2126 cx,
2127 );
2128 self.non_text_state_update_count += 1;
2129 cx.notify();
2130 }
2131
2132 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2133 /// this replica.
2134 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2135 if self
2136 .remote_selections
2137 .get(&self.text.replica_id())
2138 .map_or(true, |set| !set.selections.is_empty())
2139 {
2140 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2141 }
2142 }
2143
2144 pub fn set_agent_selections(
2145 &mut self,
2146 selections: Arc<[Selection<Anchor>]>,
2147 line_mode: bool,
2148 cursor_shape: CursorShape,
2149 cx: &mut Context<Self>,
2150 ) {
2151 let lamport_timestamp = self.text.lamport_clock.tick();
2152 self.remote_selections.insert(
2153 AGENT_REPLICA_ID,
2154 SelectionSet {
2155 selections: selections.clone(),
2156 lamport_timestamp,
2157 line_mode,
2158 cursor_shape,
2159 },
2160 );
2161 self.non_text_state_update_count += 1;
2162 cx.notify();
2163 }
2164
2165 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2166 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2167 }
2168
2169 /// Replaces the buffer's entire text.
2170 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2171 where
2172 T: Into<Arc<str>>,
2173 {
2174 self.autoindent_requests.clear();
2175 self.edit([(0..self.len(), text)], None, cx)
2176 }
2177
2178 /// Appends the given text to the end of the buffer.
2179 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2180 where
2181 T: Into<Arc<str>>,
2182 {
2183 self.edit([(self.len()..self.len(), text)], None, cx)
2184 }
2185
2186 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2187 /// delete, and a string of text to insert at that location.
2188 ///
2189 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2190 /// request for the edited ranges, which will be processed when the buffer finishes
2191 /// parsing.
2192 ///
2193 /// Parsing takes place at the end of a transaction, and may be performed synchronously
2194 /// or asynchronously, depending on the changes.
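///
/// A sketch (marked `ignore`) of a multi-range edit that requests
/// auto-indentation for each edited line:
///
/// ```ignore
/// buffer.edit(
///     [(0..5, ""), (10..10, "let x = 1;\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// // The first range deletes bytes 0..5; the second inserts at offset 10.
/// // Each edited line is re-indented once parsing completes.
/// ```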
2195 pub fn edit<I, S, T>(
2196 &mut self,
2197 edits_iter: I,
2198 autoindent_mode: Option<AutoindentMode>,
2199 cx: &mut Context<Self>,
2200 ) -> Option<clock::Lamport>
2201 where
2202 I: IntoIterator<Item = (Range<S>, T)>,
2203 S: ToOffset,
2204 T: Into<Arc<str>>,
2205 {
2206 // Skip invalid edits and coalesce contiguous ones.
2207 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2208
2209 for (range, new_text) in edits_iter {
2210 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2211
2212 if range.start > range.end {
2213 mem::swap(&mut range.start, &mut range.end);
2214 }
2215 let new_text = new_text.into();
2216 if !new_text.is_empty() || !range.is_empty() {
2217 if let Some((prev_range, prev_text)) = edits.last_mut() {
2218 if prev_range.end >= range.start {
2219 prev_range.end = cmp::max(prev_range.end, range.end);
2220 *prev_text = format!("{prev_text}{new_text}").into();
2221 } else {
2222 edits.push((range, new_text));
2223 }
2224 } else {
2225 edits.push((range, new_text));
2226 }
2227 }
2228 }
2229 if edits.is_empty() {
2230 return None;
2231 }
2232
2233 self.start_transaction();
2234 self.pending_autoindent.take();
2235 let autoindent_request = autoindent_mode
2236 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2237
2238 let edit_operation = self.text.edit(edits.iter().cloned());
2239 let edit_id = edit_operation.timestamp();
2240
2241 if let Some((before_edit, mode)) = autoindent_request {
2242 let mut delta = 0isize;
2243 let entries = edits
2244 .into_iter()
2245 .enumerate()
2246 .zip(&edit_operation.as_edit().unwrap().new_text)
2247 .map(|((ix, (range, _)), new_text)| {
2248 let new_text_length = new_text.len();
2249 let old_start = range.start.to_point(&before_edit);
2250 let new_start = (delta + range.start as isize) as usize;
2251 let range_len = range.end - range.start;
2252 delta += new_text_length as isize - range_len as isize;
2253
2254 // Decide what range of the insertion to auto-indent, and whether
2255 // the first line of the insertion should be considered a newly-inserted line
2256 // or an edit to an existing line.
2257 let mut range_of_insertion_to_indent = 0..new_text_length;
2258 let mut first_line_is_new = true;
2259
2260 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2261 let old_line_end = before_edit.line_len(old_start.row);
2262
2263 if old_start.column > old_line_start {
2264 first_line_is_new = false;
2265 }
2266
2267 if !new_text.contains('\n')
2268 && (old_start.column + (range_len as u32) < old_line_end
2269 || old_line_end == old_line_start)
2270 {
2271 first_line_is_new = false;
2272 }
2273
2274 // When inserting text starting with a newline, avoid auto-indenting the
2275 // previous line.
2276 if new_text.starts_with('\n') {
2277 range_of_insertion_to_indent.start += 1;
2278 first_line_is_new = true;
2279 }
2280
2281 let mut original_indent_column = None;
2282 if let AutoindentMode::Block {
2283 original_indent_columns,
2284 } = &mode
2285 {
2286 original_indent_column = Some(if new_text.starts_with('\n') {
2287 indent_size_for_text(
2288 new_text[range_of_insertion_to_indent.clone()].chars(),
2289 )
2290 .len
2291 } else {
2292 original_indent_columns
2293 .get(ix)
2294 .copied()
2295 .flatten()
2296 .unwrap_or_else(|| {
2297 indent_size_for_text(
2298 new_text[range_of_insertion_to_indent.clone()].chars(),
2299 )
2300 .len
2301 })
2302 });
2303
2304 // Avoid auto-indenting the line after the edit.
2305 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2306 range_of_insertion_to_indent.end -= 1;
2307 }
2308 }
2309
2310 AutoindentRequestEntry {
2311 first_line_is_new,
2312 original_indent_column,
2313 indent_size: before_edit.language_indent_size_at(range.start, cx),
2314 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2315 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2316 }
2317 })
2318 .collect();
2319
2320 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2321 before_edit,
2322 entries,
2323 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2324 ignore_empty_lines: false,
2325 }));
2326 }
2327
2328 self.end_transaction(cx);
2329 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2330 Some(edit_id)
2331 }
2332
2333 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2334 self.was_changed();
2335
2336 if self.edits_since::<usize>(old_version).next().is_none() {
2337 return;
2338 }
2339
2340 self.reparse(cx);
2341 cx.emit(BufferEvent::Edited);
2342 if was_dirty != self.is_dirty() {
2343 cx.emit(BufferEvent::DirtyChanged);
2344 }
2345 cx.notify();
2346 }
2347
2348 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2349 where
2350 I: IntoIterator<Item = Range<T>>,
2351 T: ToOffset + Copy,
2352 {
2353 let before_edit = self.snapshot();
2354 let entries = ranges
2355 .into_iter()
2356 .map(|range| AutoindentRequestEntry {
2357 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2358 first_line_is_new: true,
2359 indent_size: before_edit.language_indent_size_at(range.start, cx),
2360 original_indent_column: None,
2361 })
2362 .collect();
2363 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2364 before_edit,
2365 entries,
2366 is_block_mode: false,
2367 ignore_empty_lines: true,
2368 }));
2369 self.request_autoindent(cx);
2370 }
2371
2372 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2373 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
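///
/// A hypothetical call (marked `ignore`), assuming the position sits in the
/// middle of an existing line:
///
/// ```ignore
/// let start = buffer.insert_empty_line(Point::new(2, 5), true, true, cx);
/// // `start` is the beginning of the newly created blank line, with blank
/// // lines also ensured above and below it.
/// ```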
2374 pub fn insert_empty_line(
2375 &mut self,
2376 position: impl ToPoint,
2377 space_above: bool,
2378 space_below: bool,
2379 cx: &mut Context<Self>,
2380 ) -> Point {
2381 let mut position = position.to_point(self);
2382
2383 self.start_transaction();
2384
2385 self.edit(
2386 [(position..position, "\n")],
2387 Some(AutoindentMode::EachLine),
2388 cx,
2389 );
2390
2391 if position.column > 0 {
2392 position += Point::new(1, 0);
2393 }
2394
2395 if !self.is_line_blank(position.row) {
2396 self.edit(
2397 [(position..position, "\n")],
2398 Some(AutoindentMode::EachLine),
2399 cx,
2400 );
2401 }
2402
2403 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2404 self.edit(
2405 [(position..position, "\n")],
2406 Some(AutoindentMode::EachLine),
2407 cx,
2408 );
2409 position.row += 1;
2410 }
2411
2412 if space_below
2413 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2414 {
2415 self.edit(
2416 [(position..position, "\n")],
2417 Some(AutoindentMode::EachLine),
2418 cx,
2419 );
2420 }
2421
2422 self.end_transaction(cx);
2423
2424 position
2425 }
2426
2427 /// Applies the given remote operations to the buffer.
2428 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2429 self.pending_autoindent.take();
2430 let was_dirty = self.is_dirty();
2431 let old_version = self.version.clone();
2432 let mut deferred_ops = Vec::new();
2433 let buffer_ops = ops
2434 .into_iter()
2435 .filter_map(|op| match op {
2436 Operation::Buffer(op) => Some(op),
2437 _ => {
2438 if self.can_apply_op(&op) {
2439 self.apply_op(op, cx);
2440 } else {
2441 deferred_ops.push(op);
2442 }
2443 None
2444 }
2445 })
2446 .collect::<Vec<_>>();
2447 for operation in buffer_ops.iter() {
2448 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2449 }
2450 self.text.apply_ops(buffer_ops);
2451 self.deferred_ops.insert(deferred_ops);
2452 self.flush_deferred_ops(cx);
2453 self.did_edit(&old_version, was_dirty, cx);
2454 // Notify independently of whether the buffer was edited as the operations could include a
2455 // selection update.
2456 cx.notify();
2457 }
2458
2459 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2460 let mut deferred_ops = Vec::new();
2461 for op in self.deferred_ops.drain().iter().cloned() {
2462 if self.can_apply_op(&op) {
2463 self.apply_op(op, cx);
2464 } else {
2465 deferred_ops.push(op);
2466 }
2467 }
2468 self.deferred_ops.insert(deferred_ops);
2469 }
2470
2471 pub fn has_deferred_ops(&self) -> bool {
2472 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2473 }
2474
2475 fn can_apply_op(&self, operation: &Operation) -> bool {
2476 match operation {
2477 Operation::Buffer(_) => {
2478 unreachable!("buffer operations should never be applied at this layer")
2479 }
2480 Operation::UpdateDiagnostics {
2481 diagnostics: diagnostic_set,
2482 ..
2483 } => diagnostic_set.iter().all(|diagnostic| {
2484 self.text.can_resolve(&diagnostic.range.start)
2485 && self.text.can_resolve(&diagnostic.range.end)
2486 }),
2487 Operation::UpdateSelections { selections, .. } => selections
2488 .iter()
2489 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2490 Operation::UpdateCompletionTriggers { .. } => true,
2491 }
2492 }
2493
2494 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2495 match operation {
2496 Operation::Buffer(_) => {
2497 unreachable!("buffer operations should never be applied at this layer")
2498 }
2499 Operation::UpdateDiagnostics {
2500 server_id,
2501 diagnostics: diagnostic_set,
2502 lamport_timestamp,
2503 } => {
2504 let snapshot = self.snapshot();
2505 self.apply_diagnostic_update(
2506 server_id,
2507 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2508 lamport_timestamp,
2509 cx,
2510 );
2511 }
2512 Operation::UpdateSelections {
2513 selections,
2514 lamport_timestamp,
2515 line_mode,
2516 cursor_shape,
2517 } => {
2518 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2519 if set.lamport_timestamp > lamport_timestamp {
2520 return;
2521 }
2522 }
2523
2524 self.remote_selections.insert(
2525 lamport_timestamp.replica_id,
2526 SelectionSet {
2527 selections,
2528 lamport_timestamp,
2529 line_mode,
2530 cursor_shape,
2531 },
2532 );
2533 self.text.lamport_clock.observe(lamport_timestamp);
2534 self.non_text_state_update_count += 1;
2535 }
2536 Operation::UpdateCompletionTriggers {
2537 triggers,
2538 lamport_timestamp,
2539 server_id,
2540 } => {
2541 if triggers.is_empty() {
2542 self.completion_triggers_per_language_server
2543 .remove(&server_id);
2544 self.completion_triggers = self
2545 .completion_triggers_per_language_server
2546 .values()
2547 .flat_map(|triggers| triggers.into_iter().cloned())
2548 .collect();
2549 } else {
2550 self.completion_triggers_per_language_server
2551 .insert(server_id, triggers.iter().cloned().collect());
2552 self.completion_triggers.extend(triggers);
2553 }
2554 self.text.lamport_clock.observe(lamport_timestamp);
2555 }
2556 }
2557 }
2558
2559 fn apply_diagnostic_update(
2560 &mut self,
2561 server_id: LanguageServerId,
2562 diagnostics: DiagnosticSet,
2563 lamport_timestamp: clock::Lamport,
2564 cx: &mut Context<Self>,
2565 ) {
2566 if lamport_timestamp > self.diagnostics_timestamp {
2567 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2568 if diagnostics.is_empty() {
2569 if let Ok(ix) = ix {
2570 self.diagnostics.remove(ix);
2571 }
2572 } else {
2573 match ix {
2574 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2575 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2576 };
2577 }
2578 self.diagnostics_timestamp = lamport_timestamp;
2579 self.non_text_state_update_count += 1;
2580 self.text.lamport_clock.observe(lamport_timestamp);
2581 cx.notify();
2582 cx.emit(BufferEvent::DiagnosticsUpdated);
2583 }
2584 }
2585
2586 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2587 self.was_changed();
2588 cx.emit(BufferEvent::Operation {
2589 operation,
2590 is_local,
2591 });
2592 }
2593
2594 /// Removes the selections for a given peer.
2595 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2596 self.remote_selections.remove(&replica_id);
2597 cx.notify();
2598 }
2599
2600 /// Undoes the most recent transaction.
2601 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2602 let was_dirty = self.is_dirty();
2603 let old_version = self.version.clone();
2604
2605 if let Some((transaction_id, operation)) = self.text.undo() {
2606 self.send_operation(Operation::Buffer(operation), true, cx);
2607 self.did_edit(&old_version, was_dirty, cx);
2608 Some(transaction_id)
2609 } else {
2610 None
2611 }
2612 }
2613
2614 /// Manually undoes a specific transaction in the buffer's undo history.
2615 pub fn undo_transaction(
2616 &mut self,
2617 transaction_id: TransactionId,
2618 cx: &mut Context<Self>,
2619 ) -> bool {
2620 let was_dirty = self.is_dirty();
2621 let old_version = self.version.clone();
2622 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2623 self.send_operation(Operation::Buffer(operation), true, cx);
2624 self.did_edit(&old_version, was_dirty, cx);
2625 true
2626 } else {
2627 false
2628 }
2629 }
2630
2631 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2632 pub fn undo_to_transaction(
2633 &mut self,
2634 transaction_id: TransactionId,
2635 cx: &mut Context<Self>,
2636 ) -> bool {
2637 let was_dirty = self.is_dirty();
2638 let old_version = self.version.clone();
2639
2640 let operations = self.text.undo_to_transaction(transaction_id);
2641 let undone = !operations.is_empty();
2642 for operation in operations {
2643 self.send_operation(Operation::Buffer(operation), true, cx);
2644 }
2645 if undone {
2646 self.did_edit(&old_version, was_dirty, cx)
2647 }
2648 undone
2649 }
2650
2651 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2652 let was_dirty = self.is_dirty();
2653 let operation = self.text.undo_operations(counts);
2654 let old_version = self.version.clone();
2655 self.send_operation(Operation::Buffer(operation), true, cx);
2656 self.did_edit(&old_version, was_dirty, cx);
2657 }
2658
2659 /// Redoes the most recently undone transaction.
2660 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2661 let was_dirty = self.is_dirty();
2662 let old_version = self.version.clone();
2663
2664 if let Some((transaction_id, operation)) = self.text.redo() {
2665 self.send_operation(Operation::Buffer(operation), true, cx);
2666 self.did_edit(&old_version, was_dirty, cx);
2667 Some(transaction_id)
2668 } else {
2669 None
2670 }
2671 }
2672
2673 /// Manually redoes all changes until a given transaction in the buffer's redo history.
2674 pub fn redo_to_transaction(
2675 &mut self,
2676 transaction_id: TransactionId,
2677 cx: &mut Context<Self>,
2678 ) -> bool {
2679 let was_dirty = self.is_dirty();
2680 let old_version = self.version.clone();
2681
2682 let operations = self.text.redo_to_transaction(transaction_id);
2683 let redone = !operations.is_empty();
2684 for operation in operations {
2685 self.send_operation(Operation::Buffer(operation), true, cx);
2686 }
2687 if redone {
2688 self.did_edit(&old_version, was_dirty, cx)
2689 }
2690 redone
2691 }
2692
2693 /// Override current completion triggers with the user-provided completion triggers.
2694 pub fn set_completion_triggers(
2695 &mut self,
2696 server_id: LanguageServerId,
2697 triggers: BTreeSet<String>,
2698 cx: &mut Context<Self>,
2699 ) {
2700 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2701 if triggers.is_empty() {
2702 self.completion_triggers_per_language_server
2703 .remove(&server_id);
2704 self.completion_triggers = self
2705 .completion_triggers_per_language_server
2706 .values()
2707 .flat_map(|triggers| triggers.into_iter().cloned())
2708 .collect();
2709 } else {
2710 self.completion_triggers_per_language_server
2711 .insert(server_id, triggers.clone());
2712 self.completion_triggers.extend(triggers.iter().cloned());
2713 }
2714 self.send_operation(
2715 Operation::UpdateCompletionTriggers {
2716 triggers: triggers.into_iter().collect(),
2717 lamport_timestamp: self.completion_triggers_timestamp,
2718 server_id,
2719 },
2720 true,
2721 cx,
2722 );
2723 cx.notify();
2724 }
2725
2726 /// Returns a list of strings which trigger a completion menu for this language.
2727 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2728 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2729 &self.completion_triggers
2730 }
2731
2732 /// Call this directly after performing edits to prevent the preview tab
2733 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2734 /// to return false until there are additional edits.
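///
/// A sketch (marked `ignore`) of the intended pattern, mirroring how
/// auto-indent edits are applied internally; `edits` here is assumed to be
/// some iterator of programmatic edits:
///
/// ```ignore
/// let preserve_preview = buffer.preserve_preview();
/// buffer.edit(edits, None, cx);
/// if preserve_preview {
///     buffer.refresh_preview();
/// }
/// ```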
2735 pub fn refresh_preview(&mut self) {
2736 self.preview_version = self.version.clone();
2737 }
2738
2739 /// Whether we should preserve the preview status of a tab containing this buffer.
2740 pub fn preserve_preview(&self) -> bool {
2741 !self.has_edits_since(&self.preview_version)
2742 }
2743}
2744
2745#[doc(hidden)]
2746#[cfg(any(test, feature = "test-support"))]
2747impl Buffer {
2748 pub fn edit_via_marked_text(
2749 &mut self,
2750 marked_string: &str,
2751 autoindent_mode: Option<AutoindentMode>,
2752 cx: &mut Context<Self>,
2753 ) {
2754 let edits = self.edits_for_marked_text(marked_string);
2755 self.edit(edits, autoindent_mode, cx);
2756 }
2757
2758 pub fn set_group_interval(&mut self, group_interval: Duration) {
2759 self.text.set_group_interval(group_interval);
2760 }
2761
2762 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2763 where
2764 T: rand::Rng,
2765 {
2766 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2767 let mut last_end = None;
2768 for _ in 0..old_range_count {
2769 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2770 break;
2771 }
2772
2773 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2774 let mut range = self.random_byte_range(new_start, rng);
2775 if rng.gen_bool(0.2) {
2776 mem::swap(&mut range.start, &mut range.end);
2777 }
2778 last_end = Some(range.end);
2779
2780 let new_text_len = rng.gen_range(0..10);
2781 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2782 new_text = new_text.to_uppercase();
2783
2784 edits.push((range, new_text));
2785 }
2786 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2787 self.edit(edits, None, cx);
2788 }
2789
2790 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2791 let was_dirty = self.is_dirty();
2792 let old_version = self.version.clone();
2793
2794 let ops = self.text.randomly_undo_redo(rng);
2795 if !ops.is_empty() {
2796 for op in ops {
2797 self.send_operation(Operation::Buffer(op), true, cx);
2798 self.did_edit(&old_version, was_dirty, cx);
2799 }
2800 }
2801 }
2802}
2803
2804impl EventEmitter<BufferEvent> for Buffer {}
2805
2806impl Deref for Buffer {
2807 type Target = TextBuffer;
2808
2809 fn deref(&self) -> &Self::Target {
2810 &self.text
2811 }
2812}
2813
2814impl BufferSnapshot {
2815 /// Returns [`IndentSize`] for a given line that respects user settings and
2816 /// language preferences.
2817 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2818 indent_size_for_line(self, row)
2819 }
2820
2821 /// Returns [`IndentSize`] for a given position that respects user settings
2822 /// and language preferences.
2823 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2824 let settings = language_settings(
2825 self.language_at(position).map(|l| l.name()),
2826 self.file(),
2827 cx,
2828 );
2829 if settings.hard_tabs {
2830 IndentSize::tab()
2831 } else {
2832 IndentSize::spaces(settings.tab_size.get())
2833 }
2834 }
2835
2836 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2837 /// is passed in as `single_indent_size`.
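///
/// A sketch (marked `ignore`; assumes a `snapshot: BufferSnapshot` in scope):
/// ask for suggestions on rows 1 through 3 using a four-space indent unit.
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row}: indent by {} columns", indent.len);
/// }
/// ```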
2838 pub fn suggested_indents(
2839 &self,
2840 rows: impl Iterator<Item = u32>,
2841 single_indent_size: IndentSize,
2842 ) -> BTreeMap<u32, IndentSize> {
2843 let mut result = BTreeMap::new();
2844
2845 for row_range in contiguous_ranges(rows, 10) {
2846 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2847 Some(suggestions) => suggestions,
2848 _ => break,
2849 };
2850
2851 for (row, suggestion) in row_range.zip(suggestions) {
2852 let indent_size = if let Some(suggestion) = suggestion {
2853 result
2854 .get(&suggestion.basis_row)
2855 .copied()
2856 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2857 .with_delta(suggestion.delta, single_indent_size)
2858 } else {
2859 self.indent_size_for_line(row)
2860 };
2861
2862 result.insert(row, indent_size);
2863 }
2864 }
2865
2866 result
2867 }
2868
2869 fn suggest_autoindents(
2870 &self,
2871 row_range: Range<u32>,
2872 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2873 let config = &self.language.as_ref()?.config;
2874 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2875 let significant_indentation = config.significant_indentation;
2876
2877 // Find the suggested indentation ranges based on the syntax tree.
2878 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2879 let end = Point::new(row_range.end, 0);
2880 let range = (start..end).to_offset(&self.text);
2881 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2882 Some(&grammar.indents_config.as_ref()?.query)
2883 });
2884 let indent_configs = matches
2885 .grammars()
2886 .iter()
2887 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2888 .collect::<Vec<_>>();
2889
2890 let mut indent_ranges = Vec::<Range<Point>>::new();
2891 let mut outdent_positions = Vec::<Point>::new();
2892 while let Some(mat) = matches.peek() {
2893 let mut start: Option<Point> = None;
2894 let mut end: Option<Point> = None;
2895 let mut outdent: Option<Point> = None;
2896
2897 let config = &indent_configs[mat.grammar_index];
2898 for capture in mat.captures {
2899 if capture.index == config.indent_capture_ix {
2900 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2901 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2902 } else if Some(capture.index) == config.start_capture_ix {
2903 start = Some(Point::from_ts_point(capture.node.end_position()));
2904 } else if Some(capture.index) == config.end_capture_ix {
2905 end = Some(Point::from_ts_point(capture.node.start_position()));
2906 } else if Some(capture.index) == config.outdent_capture_ix {
2907 let point = Point::from_ts_point(capture.node.start_position());
2908 outdent.get_or_insert(point);
2909 outdent_positions.push(point);
2910 }
2911 }
2912
2913 matches.advance();
2914 // In the case of significant indentation, expand the end to the outdent position.
2915 let end = if significant_indentation {
2916 outdent.or(end)
2917 } else {
2918 end
2919 };
2920 if let Some((start, end)) = start.zip(end) {
2921 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2922 continue;
2923 }
2924 let range = start..end;
2925 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2926 Err(ix) => indent_ranges.insert(ix, range),
2927 Ok(ix) => {
2928 let prev_range = &mut indent_ranges[ix];
2929 prev_range.end = prev_range.end.max(range.end);
2930 }
2931 }
2932 }
2933 }
2934
2935 let mut error_ranges = Vec::<Range<Point>>::new();
2936 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2937 grammar.error_query.as_ref()
2938 });
2939 while let Some(mat) = matches.peek() {
2940 let node = mat.captures[0].node;
2941 let start = Point::from_ts_point(node.start_position());
2942 let end = Point::from_ts_point(node.end_position());
2943 let range = start..end;
2944 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2945 Ok(ix) | Err(ix) => ix,
2946 };
2947 let mut end_ix = ix;
2948 while let Some(existing_range) = error_ranges.get(end_ix) {
2949 if existing_range.end < end {
2950 end_ix += 1;
2951 } else {
2952 break;
2953 }
2954 }
2955 error_ranges.splice(ix..end_ix, [range]);
2956 matches.advance();
2957 }
2958
2959 // We don't use outdent positions to truncate in the case of significant indentation;
2960 // rather, we use them to expand the range (handled above).
2961 if !significant_indentation {
2962 outdent_positions.sort();
2963 for outdent_position in outdent_positions {
2964 // find the innermost indent range containing this outdent_position
2965 // set its end to the outdent position
2966 if let Some(range_to_truncate) = indent_ranges
2967 .iter_mut()
2968 .filter(|indent_range| indent_range.contains(&outdent_position))
2969 .next_back()
2970 {
2971 range_to_truncate.end = outdent_position;
2972 }
2973 }
2974 }
2975
2976 // Find the suggested indentation increases and decreases based on regexes.
2977 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2978 self.for_each_line(
2979 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2980 ..Point::new(row_range.end, 0),
2981 |row, line| {
2982 if config
2983 .decrease_indent_pattern
2984 .as_ref()
2985 .map_or(false, |regex| regex.is_match(line))
2986 {
2987 indent_change_rows.push((row, Ordering::Less));
2988 }
2989 if config
2990 .increase_indent_pattern
2991 .as_ref()
2992 .map_or(false, |regex| regex.is_match(line))
2993 {
2994 indent_change_rows.push((row + 1, Ordering::Greater));
2995 }
2996 },
2997 );
2998
2999 let mut indent_changes = indent_change_rows.into_iter().peekable();
3000 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3001 prev_non_blank_row.unwrap_or(0)
3002 } else {
3003 row_range.start.saturating_sub(1)
3004 };
3005 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3006 Some(row_range.map(move |row| {
3007 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3008
3009 let mut indent_from_prev_row = false;
3010 let mut outdent_from_prev_row = false;
3011 let mut outdent_to_row = u32::MAX;
3012 let mut from_regex = false;
3013
3014 while let Some((indent_row, delta)) = indent_changes.peek() {
3015 match indent_row.cmp(&row) {
3016 Ordering::Equal => match delta {
3017 Ordering::Less => {
3018 from_regex = true;
3019 outdent_from_prev_row = true
3020 }
3021 Ordering::Greater => {
3022 indent_from_prev_row = true;
3023 from_regex = true
3024 }
3025 _ => {}
3026 },
3027
3028 Ordering::Greater => break,
3029 Ordering::Less => {}
3030 }
3031
3032 indent_changes.next();
3033 }
3034
3035 for range in &indent_ranges {
3036 if range.start.row >= row {
3037 break;
3038 }
3039 if range.start.row == prev_row && range.end > row_start {
3040 indent_from_prev_row = true;
3041 }
3042 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3043 {
3044 indent_from_prev_row = true;
3045 }
3046 if !significant_indentation || !self.is_line_blank(row) {
3047 if range.end > prev_row_start && range.end <= row_start {
3048 outdent_to_row = outdent_to_row.min(range.start.row);
3049 }
3050 }
3051 }
3052
3053 let within_error = error_ranges
3054 .iter()
3055 .any(|e| e.start.row < row && e.end > row_start);
3056
3057 let suggestion = if outdent_to_row == prev_row
3058 || (outdent_from_prev_row && indent_from_prev_row)
3059 {
3060 Some(IndentSuggestion {
3061 basis_row: prev_row,
3062 delta: Ordering::Equal,
3063 within_error: within_error && !from_regex,
3064 })
3065 } else if indent_from_prev_row {
3066 Some(IndentSuggestion {
3067 basis_row: prev_row,
3068 delta: Ordering::Greater,
3069 within_error: within_error && !from_regex,
3070 })
3071 } else if outdent_to_row < prev_row {
3072 Some(IndentSuggestion {
3073 basis_row: outdent_to_row,
3074 delta: Ordering::Equal,
3075 within_error: within_error && !from_regex,
3076 })
3077 } else if outdent_from_prev_row {
3078 Some(IndentSuggestion {
3079 basis_row: prev_row,
3080 delta: Ordering::Less,
3081 within_error: within_error && !from_regex,
3082 })
3083 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3084 {
3085 Some(IndentSuggestion {
3086 basis_row: prev_row,
3087 delta: Ordering::Equal,
3088 within_error: within_error && !from_regex,
3089 })
3090 } else {
3091 None
3092 };
3093
3094 prev_row = row;
3095 prev_row_start = row_start;
3096 suggestion
3097 }))
3098 }
3099
3100 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3101 while row > 0 {
3102 row -= 1;
3103 if !self.is_line_blank(row) {
3104 return Some(row);
3105 }
3106 }
3107 None
3108 }
3109
3110 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3111 let captures = self.syntax.captures(range, &self.text, |grammar| {
3112 grammar.highlights_query.as_ref()
3113 });
3114 let highlight_maps = captures
3115 .grammars()
3116 .iter()
3117 .map(|grammar| grammar.highlight_map())
3118 .collect();
3119 (captures, highlight_maps)
3120 }
3121
3122 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3123 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3124 /// returned in chunks where each chunk has a single syntax highlighting style and
3125 /// diagnostic status.
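///
/// A sketch (marked `ignore`; the chunk's `text` field is an assumption about
/// the chunk type): reassemble the buffer's text from language-aware chunks.
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a single highlight style and diagnostic status.
///     text.push_str(chunk.text);
/// }
/// ```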
3126 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3127 let range = range.start.to_offset(self)..range.end.to_offset(self);
3128
3129 let mut syntax = None;
3130 if language_aware {
3131 syntax = Some(self.get_highlights(range.clone()));
3132 }
3133 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3134 let diagnostics = language_aware;
3135 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3136 }
3137
3138 pub fn highlighted_text_for_range<T: ToOffset>(
3139 &self,
3140 range: Range<T>,
3141 override_style: Option<HighlightStyle>,
3142 syntax_theme: &SyntaxTheme,
3143 ) -> HighlightedText {
3144 HighlightedText::from_buffer_range(
3145 range,
3146 &self.text,
3147 &self.syntax,
3148 override_style,
3149 syntax_theme,
3150 )
3151 }
3152
3153 /// Invokes the given callback for each line of text in the given range of the buffer.
3154 /// Uses a callback to avoid allocating a string for each line.
3155 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3156 let mut line = String::new();
3157 let mut row = range.start.row;
3158 for chunk in self
3159 .as_rope()
3160 .chunks_in_range(range.to_offset(self))
3161 .chain(["\n"])
3162 {
3163 for (newline_ix, text) in chunk.split('\n').enumerate() {
3164 if newline_ix > 0 {
3165 callback(row, &line);
3166 row += 1;
3167 line.clear();
3168 }
3169 line.push_str(text);
3170 }
3171 }
3172 }
3173
3174 /// Iterates over every [`SyntaxLayer`] in the buffer.
3175 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3176 self.syntax
3177 .layers_for_range(0..self.len(), &self.text, true)
3178 }
3179
3180 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3181 let offset = position.to_offset(self);
3182 self.syntax
3183 .layers_for_range(offset..offset, &self.text, false)
3184 .filter(|l| l.node().end_byte() > offset)
3185 .last()
3186 }
3187
3188 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3189 &self,
3190 range: Range<D>,
3191 ) -> Option<SyntaxLayer> {
3192 let range = range.to_offset(self);
3193 return self
3194 .syntax
3195 .layers_for_range(range, &self.text, false)
3196 .max_by(|a, b| {
3197 if a.depth != b.depth {
3198 a.depth.cmp(&b.depth)
3199 } else if a.offset.0 != b.offset.0 {
3200 a.offset.0.cmp(&b.offset.0)
3201 } else {
3202 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3203 }
3204 });
3205 }
3206
3207 /// Returns the main [`Language`].
3208 pub fn language(&self) -> Option<&Arc<Language>> {
3209 self.language.as_ref()
3210 }
3211
3212 /// Returns the [`Language`] at the given location.
3213 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3214 self.syntax_layer_at(position)
3215 .map(|info| info.language)
3216 .or(self.language.as_ref())
3217 }
3218
3219 /// Returns the settings for the language at the given location.
3220 pub fn settings_at<'a, D: ToOffset>(
3221 &'a self,
3222 position: D,
3223 cx: &'a App,
3224 ) -> Cow<'a, LanguageSettings> {
3225 language_settings(
3226 self.language_at(position).map(|l| l.name()),
3227 self.file.as_ref(),
3228 cx,
3229 )
3230 }
3231
3232 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3233 CharClassifier::new(self.language_scope_at(point))
3234 }
3235
3236 /// Returns the [`LanguageScope`] at the given location.
3237 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3238 let offset = position.to_offset(self);
3239 let mut scope = None;
3240 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3241
3242 // Use the layer that has the smallest node intersecting the given point.
3243 for layer in self
3244 .syntax
3245 .layers_for_range(offset..offset, &self.text, false)
3246 {
3247 let mut cursor = layer.node().walk();
3248
3249 let mut range = None;
3250 loop {
3251 let child_range = cursor.node().byte_range();
3252 if !child_range.contains(&offset) {
3253 break;
3254 }
3255
3256 range = Some(child_range);
3257 if cursor.goto_first_child_for_byte(offset).is_none() {
3258 break;
3259 }
3260 }
3261
3262 if let Some(range) = range {
3263 if smallest_range_and_depth.as_ref().map_or(
3264 true,
3265 |(smallest_range, smallest_range_depth)| {
3266 if layer.depth > *smallest_range_depth {
3267 true
3268 } else if layer.depth == *smallest_range_depth {
3269 range.len() < smallest_range.len()
3270 } else {
3271 false
3272 }
3273 },
3274 ) {
3275 smallest_range_and_depth = Some((range, layer.depth));
3276 scope = Some(LanguageScope {
3277 language: layer.language.clone(),
3278 override_id: layer.override_id(offset, &self.text),
3279 });
3280 }
3281 }
3282 }
3283
3284 scope.or_else(|| {
3285 self.language.clone().map(|language| LanguageScope {
3286 language,
3287 override_id: None,
3288 })
3289 })
3290 }
3291
3292 /// Returns a tuple of the range and character kind of the word
3293 /// surrounding the given position.
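///
/// A sketch (marked `ignore`): find the word around byte offset 42.
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(42);
/// // `range` spans the word containing offset 42 (it is empty when no word
/// // characters surround the position); `kind` classifies those characters.
/// ```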
3294 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3295 let mut start = start.to_offset(self);
3296 let mut end = start;
3297 let mut next_chars = self.chars_at(start).take(128).peekable();
3298 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3299
3300 let classifier = self.char_classifier_at(start);
3301 let word_kind = cmp::max(
3302 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3303 next_chars.peek().copied().map(|c| classifier.kind(c)),
3304 );
3305
3306 for ch in prev_chars {
3307 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3308 start -= ch.len_utf8();
3309 } else {
3310 break;
3311 }
3312 }
3313
3314 for ch in next_chars {
3315 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3316 end += ch.len_utf8();
3317 } else {
3318 break;
3319 }
3320 }
3321
3322 (start..end, word_kind)
3323 }
3324
3325 /// Returns the closest syntax node enclosing the given range.
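///
/// A sketch (marked `ignore`): inspect the smallest node strictly containing
/// a byte range.
///
/// ```ignore
/// if let Some(node) = snapshot.syntax_ancestor(10..20) {
///     println!("enclosing node kind: {}", node.kind());
/// }
/// ```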
3326 pub fn syntax_ancestor<'a, T: ToOffset>(
3327 &'a self,
3328 range: Range<T>,
3329 ) -> Option<tree_sitter::Node<'a>> {
3330 let range = range.start.to_offset(self)..range.end.to_offset(self);
3331 let mut result: Option<tree_sitter::Node<'a>> = None;
3332 'outer: for layer in self
3333 .syntax
3334 .layers_for_range(range.clone(), &self.text, true)
3335 {
3336 let mut cursor = layer.node().walk();
3337
3338 // Descend to the first leaf that touches the start of the range.
3339 //
3340 // If the range is non-empty and the current node ends exactly at the start,
3341 // move to the next sibling to find a node that extends beyond the start.
3342 //
3343 // If the range is empty and the current node starts after the range position,
3344 // move to the previous sibling to find the node that contains the position.
3345 while cursor.goto_first_child_for_byte(range.start).is_some() {
3346 if !range.is_empty() && cursor.node().end_byte() == range.start {
3347 cursor.goto_next_sibling();
3348 }
3349 if range.is_empty() && cursor.node().start_byte() > range.start {
3350 cursor.goto_previous_sibling();
3351 }
3352 }
3353
3354 // Ascend to the smallest ancestor that strictly contains the range.
3355 loop {
3356 let node_range = cursor.node().byte_range();
3357 if node_range.start <= range.start
3358 && node_range.end >= range.end
3359 && node_range.len() > range.len()
3360 {
3361 break;
3362 }
3363 if !cursor.goto_parent() {
3364 continue 'outer;
3365 }
3366 }
3367
3368 let left_node = cursor.node();
3369 let mut layer_result = left_node;
3370
3371 // For an empty range, try to find another node immediately to the right of the range.
3372 if left_node.end_byte() == range.start {
3373 let mut right_node = None;
3374 while !cursor.goto_next_sibling() {
3375 if !cursor.goto_parent() {
3376 break;
3377 }
3378 }
3379
3380 while cursor.node().start_byte() == range.start {
3381 right_node = Some(cursor.node());
3382 if !cursor.goto_first_child() {
3383 break;
3384 }
3385 }
3386
3387 // If there is a candidate node on both sides of the (empty) range, then
3388 // decide between the two by favoring a named node over an anonymous token.
3389 // If both nodes are the same in that regard, favor the right one.
3390 if let Some(right_node) = right_node {
3391 if right_node.is_named() || !left_node.is_named() {
3392 layer_result = right_node;
3393 }
3394 }
3395 }
3396
3397 if let Some(previous_result) = &result {
3398 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3399 continue;
3400 }
3401 }
3402 result = Some(layer_result);
3403 }
3404
3405 result
3406 }
3407
3408 /// Returns the root syntax node within the given row.
3409 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3410 let start_offset = position.to_offset(self);
3411
3412 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3413
3414 let layer = self
3415 .syntax
3416 .layers_for_range(start_offset..start_offset, &self.text, true)
3417 .next()?;
3418
3419 let mut cursor = layer.node().walk();
3420
3421 // Descend to the first leaf that touches the start of the range.
3422 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3423 if cursor.node().end_byte() == start_offset {
3424 cursor.goto_next_sibling();
3425 }
3426 }
3427
3428 // Ascend to the root node within the same row.
3429 while cursor.goto_parent() {
3430 if cursor.node().start_position().row != row {
3431 break;
3432 }
3433 }
3434
3435 return Some(cursor.node());
3436 }
3437
3438 /// Returns the outline for the buffer.
3439 ///
3440 /// This method allows passing an optional [`SyntaxTheme`] to
3441 /// syntax-highlight the returned symbols.
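///
/// A sketch (marked `ignore`; assumes the returned [`Outline`] exposes its
/// items): print each symbol indented by its nesting depth.
///
/// ```ignore
/// if let Some(outline) = snapshot.outline(None) {
///     for item in &outline.items {
///         println!("{}{}", "  ".repeat(item.depth), item.text);
///     }
/// }
/// ```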
3442 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3443 self.outline_items_containing(0..self.len(), true, theme)
3444 .map(Outline::new)
3445 }
3446
3447 /// Returns all the symbols that contain the given position.
3448 ///
3449 /// This method allows passing an optional [`SyntaxTheme`] to
3450 /// syntax-highlight the returned symbols.
3451 pub fn symbols_containing<T: ToOffset>(
3452 &self,
3453 position: T,
3454 theme: Option<&SyntaxTheme>,
3455 ) -> Option<Vec<OutlineItem<Anchor>>> {
3456 let position = position.to_offset(self);
3457 let mut items = self.outline_items_containing(
3458 position.saturating_sub(1)..self.len().min(position + 1),
3459 false,
3460 theme,
3461 )?;
3462 let mut prev_depth = None;
3463 items.retain(|item| {
3464 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3465 prev_depth = Some(item.depth);
3466 result
3467 });
3468 Some(items)
3469 }
3470
3471 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3472 let range = range.to_offset(self);
3473 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3474 grammar.outline_config.as_ref().map(|c| &c.query)
3475 });
3476 let configs = matches
3477 .grammars()
3478 .iter()
3479 .map(|g| g.outline_config.as_ref().unwrap())
3480 .collect::<Vec<_>>();
3481
3482 while let Some(mat) = matches.peek() {
3483 let config = &configs[mat.grammar_index];
3484 let containing_item_node = maybe!({
3485 let item_node = mat.captures.iter().find_map(|cap| {
3486 if cap.index == config.item_capture_ix {
3487 Some(cap.node)
3488 } else {
3489 None
3490 }
3491 })?;
3492
3493 let item_byte_range = item_node.byte_range();
3494 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3495 None
3496 } else {
3497 Some(item_node)
3498 }
3499 });
3500
3501 if let Some(item_node) = containing_item_node {
3502 return Some(
3503 Point::from_ts_point(item_node.start_position())
3504 ..Point::from_ts_point(item_node.end_position()),
3505 );
3506 }
3507
3508 matches.advance();
3509 }
3510 None
3511 }
3512
3513 pub fn outline_items_containing<T: ToOffset>(
3514 &self,
3515 range: Range<T>,
3516 include_extra_context: bool,
3517 theme: Option<&SyntaxTheme>,
3518 ) -> Option<Vec<OutlineItem<Anchor>>> {
3519 let range = range.to_offset(self);
3520 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3521 grammar.outline_config.as_ref().map(|c| &c.query)
3522 });
3523 let configs = matches
3524 .grammars()
3525 .iter()
3526 .map(|g| g.outline_config.as_ref().unwrap())
3527 .collect::<Vec<_>>();
3528
3529 let mut items = Vec::new();
3530 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3531 while let Some(mat) = matches.peek() {
3532 let config = &configs[mat.grammar_index];
3533 if let Some(item) =
3534 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3535 {
3536 items.push(item);
3537 } else if let Some(capture) = mat
3538 .captures
3539 .iter()
3540 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3541 {
3542 let capture_range = capture.node.start_position()..capture.node.end_position();
3543 let mut capture_row_range =
3544 capture_range.start.row as u32..capture_range.end.row as u32;
3545 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3546 {
3547 capture_row_range.end -= 1;
3548 }
3549 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3550 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3551 last_row_range.end = capture_row_range.end;
3552 } else {
3553 annotation_row_ranges.push(capture_row_range);
3554 }
3555 } else {
3556 annotation_row_ranges.push(capture_row_range);
3557 }
3558 }
3559 matches.advance();
3560 }
3561
3562 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3563
3564 // Assign depths based on containment relationships and convert to anchors.
3565 let mut item_ends_stack = Vec::<Point>::new();
3566 let mut anchor_items = Vec::new();
3567 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3568 for item in items {
3569 while let Some(last_end) = item_ends_stack.last().copied() {
3570 if last_end < item.range.end {
3571 item_ends_stack.pop();
3572 } else {
3573 break;
3574 }
3575 }
3576
3577 let mut annotation_row_range = None;
3578 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3579 let row_preceding_item = item.range.start.row.saturating_sub(1);
3580 if next_annotation_row_range.end < row_preceding_item {
3581 annotation_row_ranges.next();
3582 } else {
3583 if next_annotation_row_range.end == row_preceding_item {
3584 annotation_row_range = Some(next_annotation_row_range.clone());
3585 annotation_row_ranges.next();
3586 }
3587 break;
3588 }
3589 }
3590
3591 anchor_items.push(OutlineItem {
3592 depth: item_ends_stack.len(),
3593 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3594 text: item.text,
3595 highlight_ranges: item.highlight_ranges,
3596 name_ranges: item.name_ranges,
3597 body_range: item.body_range.map(|body_range| {
3598 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3599 }),
3600 annotation_range: annotation_row_range.map(|annotation_range| {
3601 self.anchor_after(Point::new(annotation_range.start, 0))
3602 ..self.anchor_before(Point::new(
3603 annotation_range.end,
3604 self.line_len(annotation_range.end),
3605 ))
3606 }),
3607 });
3608 item_ends_stack.push(item.range.end);
3609 }
3610
3611 Some(anchor_items)
3612 }
3613
3614 fn next_outline_item(
3615 &self,
3616 config: &OutlineConfig,
3617 mat: &SyntaxMapMatch,
3618 range: &Range<usize>,
3619 include_extra_context: bool,
3620 theme: Option<&SyntaxTheme>,
3621 ) -> Option<OutlineItem<Point>> {
3622 let item_node = mat.captures.iter().find_map(|cap| {
3623 if cap.index == config.item_capture_ix {
3624 Some(cap.node)
3625 } else {
3626 None
3627 }
3628 })?;
3629
3630 let item_byte_range = item_node.byte_range();
3631 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3632 return None;
3633 }
3634 let item_point_range = Point::from_ts_point(item_node.start_position())
3635 ..Point::from_ts_point(item_node.end_position());
3636
3637 let mut open_point = None;
3638 let mut close_point = None;
3639 let mut buffer_ranges = Vec::new();
3640 for capture in mat.captures {
3641 let node_is_name;
3642 if capture.index == config.name_capture_ix {
3643 node_is_name = true;
3644 } else if Some(capture.index) == config.context_capture_ix
3645 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3646 {
3647 node_is_name = false;
3648 } else {
3649 if Some(capture.index) == config.open_capture_ix {
3650 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3651 } else if Some(capture.index) == config.close_capture_ix {
3652 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3653 }
3654
3655 continue;
3656 }
3657
3658 let mut range = capture.node.start_byte()..capture.node.end_byte();
3659 let start = capture.node.start_position();
3660 if capture.node.end_position().row > start.row {
3661 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3662 }
3663
3664 if !range.is_empty() {
3665 buffer_ranges.push((range, node_is_name));
3666 }
3667 }
3668 if buffer_ranges.is_empty() {
3669 return None;
3670 }
3671 let mut text = String::new();
3672 let mut highlight_ranges = Vec::new();
3673 let mut name_ranges = Vec::new();
3674 let mut chunks = self.chunks(
3675 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3676 true,
3677 );
3678 let mut last_buffer_range_end = 0;
3679
3680 for (buffer_range, is_name) in buffer_ranges {
3681 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3682 if space_added {
3683 text.push(' ');
3684 }
3685 let before_append_len = text.len();
3686 let mut offset = buffer_range.start;
3687 chunks.seek(buffer_range.clone());
3688 for mut chunk in chunks.by_ref() {
3689 if chunk.text.len() > buffer_range.end - offset {
3690 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3691 offset = buffer_range.end;
3692 } else {
3693 offset += chunk.text.len();
3694 }
3695 let style = chunk
3696 .syntax_highlight_id
3697 .zip(theme)
3698 .and_then(|(highlight, theme)| highlight.style(theme));
3699 if let Some(style) = style {
3700 let start = text.len();
3701 let end = start + chunk.text.len();
3702 highlight_ranges.push((start..end, style));
3703 }
3704 text.push_str(chunk.text);
3705 if offset >= buffer_range.end {
3706 break;
3707 }
3708 }
3709 if is_name {
3710 let after_append_len = text.len();
3711 let start = if space_added && !name_ranges.is_empty() {
3712 before_append_len - 1
3713 } else {
3714 before_append_len
3715 };
3716 name_ranges.push(start..after_append_len);
3717 }
3718 last_buffer_range_end = buffer_range.end;
3719 }
3720
3721 Some(OutlineItem {
3722 depth: 0, // We'll calculate the depth later
3723 range: item_point_range,
3724 text,
3725 highlight_ranges,
3726 name_ranges,
3727 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3728 annotation_range: None,
3729 })
3730 }
3731
3732 pub fn function_body_fold_ranges<T: ToOffset>(
3733 &self,
3734 within: Range<T>,
3735 ) -> impl Iterator<Item = Range<usize>> + '_ {
3736 self.text_object_ranges(within, TreeSitterOptions::default())
3737 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3738 }
3739
3740 /// For each grammar in the language, runs the provided
3741 /// [`tree_sitter::Query`] against the given range.
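    ///
    /// A sketch of how the other query helpers in this file use it (not a doctest; `snapshot`
    /// is assumed to be a [`BufferSnapshot`]):
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for the current match here.
    ///     matches.advance();
    /// }
    /// ```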
3742 pub fn matches(
3743 &self,
3744 range: Range<usize>,
3745 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3746 ) -> SyntaxMapMatches {
3747 self.syntax.matches(range, self, query)
3748 }
3749
3750 pub fn all_bracket_ranges(
3751 &self,
3752 range: Range<usize>,
3753 ) -> impl Iterator<Item = BracketMatch> + '_ {
3754 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3755 grammar.brackets_config.as_ref().map(|c| &c.query)
3756 });
3757 let configs = matches
3758 .grammars()
3759 .iter()
3760 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3761 .collect::<Vec<_>>();
3762
3763 iter::from_fn(move || {
3764 while let Some(mat) = matches.peek() {
3765 let mut open = None;
3766 let mut close = None;
3767 let config = &configs[mat.grammar_index];
3768 let pattern = &config.patterns[mat.pattern_index];
3769 for capture in mat.captures {
3770 if capture.index == config.open_capture_ix {
3771 open = Some(capture.node.byte_range());
3772 } else if capture.index == config.close_capture_ix {
3773 close = Some(capture.node.byte_range());
3774 }
3775 }
3776
3777 matches.advance();
3778
3779 let Some((open_range, close_range)) = open.zip(close) else {
3780 continue;
3781 };
3782
3783 let bracket_range = open_range.start..=close_range.end;
3784 if !bracket_range.overlaps(&range) {
3785 continue;
3786 }
3787
3788 return Some(BracketMatch {
3789 open_range,
3790 close_range,
3791 newline_only: pattern.newline_only,
3792 });
3793 }
3794 None
3795 })
3796 }
3797
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3799 pub fn bracket_ranges<T: ToOffset>(
3800 &self,
3801 range: Range<T>,
3802 ) -> impl Iterator<Item = BracketMatch> + '_ {
3803 // Find bracket pairs that *inclusively* contain the given range.
3804 let range = range.start.to_offset(self).saturating_sub(1)
3805 ..self.len().min(range.end.to_offset(self) + 1);
3806 self.all_bracket_ranges(range)
3807 .filter(|pair| !pair.newline_only)
3808 }
3809
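    /// Returns the text-object ranges (as defined by each grammar's text-object query)
    /// that overlap or are adjacent to the given range.
    ///
    /// A usage sketch (not a doctest; `snapshot` and `offset` are assumed):
    ///
    /// ```ignore
    /// for (range, text_object) in
    ///     snapshot.text_object_ranges(offset..offset, TreeSitterOptions::default())
    /// {
    ///     if text_object == TextObject::InsideFunction {
    ///         // `range` covers the body of a function containing (or adjacent to) `offset`.
    ///     }
    /// }
    /// ```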
3810 pub fn text_object_ranges<T: ToOffset>(
3811 &self,
3812 range: Range<T>,
3813 options: TreeSitterOptions,
3814 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3815 let range = range.start.to_offset(self).saturating_sub(1)
3816 ..self.len().min(range.end.to_offset(self) + 1);
3817
3818 let mut matches =
3819 self.syntax
3820 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3821 grammar.text_object_config.as_ref().map(|c| &c.query)
3822 });
3823
3824 let configs = matches
3825 .grammars()
3826 .iter()
3827 .map(|grammar| grammar.text_object_config.as_ref())
3828 .collect::<Vec<_>>();
3829
3830 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3831
3832 iter::from_fn(move || {
3833 loop {
3834 while let Some(capture) = captures.pop() {
3835 if capture.0.overlaps(&range) {
3836 return Some(capture);
3837 }
3838 }
3839
3840 let mat = matches.peek()?;
3841
3842 let Some(config) = configs[mat.grammar_index].as_ref() else {
3843 matches.advance();
3844 continue;
3845 };
3846
3847 for capture in mat.captures {
3848 let Some(ix) = config
3849 .text_objects_by_capture_ix
3850 .binary_search_by_key(&capture.index, |e| e.0)
3851 .ok()
3852 else {
3853 continue;
3854 };
3855 let text_object = config.text_objects_by_capture_ix[ix].1;
3856 let byte_range = capture.node.byte_range();
3857
3858 let mut found = false;
3859 for (range, existing) in captures.iter_mut() {
3860 if existing == &text_object {
3861 range.start = range.start.min(byte_range.start);
3862 range.end = range.end.max(byte_range.end);
3863 found = true;
3864 break;
3865 }
3866 }
3867
3868 if !found {
3869 captures.push((byte_range, text_object));
3870 }
3871 }
3872
3873 matches.advance();
3874 }
3875 })
3876 }
3877
    /// Returns enclosing bracket ranges containing the given range.
3879 pub fn enclosing_bracket_ranges<T: ToOffset>(
3880 &self,
3881 range: Range<T>,
3882 ) -> impl Iterator<Item = BracketMatch> + '_ {
3883 let range = range.start.to_offset(self)..range.end.to_offset(self);
3884
3885 self.bracket_ranges(range.clone()).filter(move |pair| {
3886 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3887 })
3888 }
3889
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
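    ///
    /// A usage sketch (not a doctest; `snapshot` and `offset` are assumed):
    ///
    /// ```ignore
    /// // Hypothetical filter: ignore pairs whose brackets are directly adjacent.
    /// let filter = |open: Range<usize>, close: Range<usize>| close.start > open.end;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(offset..offset, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the two bracket tokens.
    /// }
    /// ```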
3893 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3894 &self,
3895 range: Range<T>,
3896 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3897 ) -> Option<(Range<usize>, Range<usize>)> {
3898 let range = range.start.to_offset(self)..range.end.to_offset(self);
3899
3900 // Get the ranges of the innermost pair of brackets.
3901 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3902
3903 for pair in self.enclosing_bracket_ranges(range.clone()) {
3904 if let Some(range_filter) = range_filter {
3905 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3906 continue;
3907 }
3908 }
3909
3910 let len = pair.close_range.end - pair.open_range.start;
3911
3912 if let Some((existing_open, existing_close)) = &result {
3913 let existing_len = existing_close.end - existing_open.start;
3914 if len > existing_len {
3915 continue;
3916 }
3917 }
3918
3919 result = Some((pair.open_range, pair.close_range));
3920 }
3921
3922 result
3923 }
3924
3925 /// Returns anchor ranges for any matches of the redaction query.
3926 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3927 /// will be run on the relevant section of the buffer.
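    ///
    /// A usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// let redactions: Vec<Range<usize>> =
    ///     snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```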
3928 pub fn redacted_ranges<T: ToOffset>(
3929 &self,
3930 range: Range<T>,
3931 ) -> impl Iterator<Item = Range<usize>> + '_ {
3932 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3933 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3934 grammar
3935 .redactions_config
3936 .as_ref()
3937 .map(|config| &config.query)
3938 });
3939
3940 let configs = syntax_matches
3941 .grammars()
3942 .iter()
3943 .map(|grammar| grammar.redactions_config.as_ref())
3944 .collect::<Vec<_>>();
3945
3946 iter::from_fn(move || {
3947 let redacted_range = syntax_matches
3948 .peek()
3949 .and_then(|mat| {
3950 configs[mat.grammar_index].and_then(|config| {
3951 mat.captures
3952 .iter()
3953 .find(|capture| capture.index == config.redaction_capture_ix)
3954 })
3955 })
3956 .map(|mat| mat.node.byte_range());
3957 syntax_matches.advance();
3958 redacted_range
3959 })
3960 }
3961
3962 pub fn injections_intersecting_range<T: ToOffset>(
3963 &self,
3964 range: Range<T>,
3965 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3966 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3967
3968 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3969 grammar
3970 .injection_config
3971 .as_ref()
3972 .map(|config| &config.query)
3973 });
3974
3975 let configs = syntax_matches
3976 .grammars()
3977 .iter()
3978 .map(|grammar| grammar.injection_config.as_ref())
3979 .collect::<Vec<_>>();
3980
3981 iter::from_fn(move || {
3982 let ranges = syntax_matches.peek().and_then(|mat| {
3983 let config = &configs[mat.grammar_index]?;
3984 let content_capture_range = mat.captures.iter().find_map(|capture| {
3985 if capture.index == config.content_capture_ix {
3986 Some(capture.node.byte_range())
3987 } else {
3988 None
3989 }
3990 })?;
3991 let language = self.language_at(content_capture_range.start)?;
3992 Some((content_capture_range, language))
3993 });
3994 syntax_matches.advance();
3995 ranges
3996 })
3997 }
3998
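    /// Returns the runnable ranges captured by each grammar's runnables query within
    /// the given range.
    ///
    /// A usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for runnable_range in snapshot.runnable_ranges(0..snapshot.len()) {
    ///     // `run_range` is the byte range of the capture marked as the run position;
    ///     // `runnable.tags` holds the `tag` properties declared on the query pattern.
    ///     let _ = (&runnable_range.run_range, &runnable_range.runnable.tags);
    /// }
    /// ```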
3999 pub fn runnable_ranges(
4000 &self,
4001 offset_range: Range<usize>,
4002 ) -> impl Iterator<Item = RunnableRange> + '_ {
4003 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4004 grammar.runnable_config.as_ref().map(|config| &config.query)
4005 });
4006
4007 let test_configs = syntax_matches
4008 .grammars()
4009 .iter()
4010 .map(|grammar| grammar.runnable_config.as_ref())
4011 .collect::<Vec<_>>();
4012
4013 iter::from_fn(move || {
4014 loop {
4015 let mat = syntax_matches.peek()?;
4016
4017 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4018 let mut run_range = None;
4019 let full_range = mat.captures.iter().fold(
4020 Range {
4021 start: usize::MAX,
4022 end: 0,
4023 },
4024 |mut acc, next| {
4025 let byte_range = next.node.byte_range();
4026 if acc.start > byte_range.start {
4027 acc.start = byte_range.start;
4028 }
4029 if acc.end < byte_range.end {
4030 acc.end = byte_range.end;
4031 }
4032 acc
4033 },
4034 );
4035 if full_range.start > full_range.end {
4036 // We did not find a full spanning range of this match.
4037 return None;
4038 }
4039 let extra_captures: SmallVec<[_; 1]> =
4040 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4041 test_configs
4042 .extra_captures
4043 .get(capture.index as usize)
4044 .cloned()
4045 .and_then(|tag_name| match tag_name {
4046 RunnableCapture::Named(name) => {
4047 Some((capture.node.byte_range(), name))
4048 }
4049 RunnableCapture::Run => {
4050 let _ = run_range.insert(capture.node.byte_range());
4051 None
4052 }
4053 })
4054 }));
4055 let run_range = run_range?;
4056 let tags = test_configs
4057 .query
4058 .property_settings(mat.pattern_index)
4059 .iter()
4060 .filter_map(|property| {
4061 if *property.key == *"tag" {
4062 property
4063 .value
4064 .as_ref()
4065 .map(|value| RunnableTag(value.to_string().into()))
4066 } else {
4067 None
4068 }
4069 })
4070 .collect();
4071 let extra_captures = extra_captures
4072 .into_iter()
4073 .map(|(range, name)| {
4074 (
4075 name.to_string(),
4076 self.text_for_range(range.clone()).collect::<String>(),
4077 )
4078 })
4079 .collect();
4080 // All tags should have the same range.
4081 Some(RunnableRange {
4082 run_range,
4083 full_range,
4084 runnable: Runnable {
4085 tags,
4086 language: mat.language,
4087 buffer: self.remote_id(),
4088 },
4089 extra_captures,
4090 buffer_id: self.remote_id(),
4091 })
4092 });
4093
4094 syntax_matches.advance();
4095 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But a match whose
                    // captures did not include a run marker should not end this iterator, so in that
                    // case we simply loop around to the next match instead of returning `None`.
4098 return test_range;
4099 }
4100 }
4101 })
4102 }
4103
    /// Returns each peer's selections that intersect the given range,
    /// optionally including the local replica's own selections.
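    ///
    /// A usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // `selection.start` and `selection.end` are `Anchor`s into this buffer.
    ///     }
    /// }
    /// ```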
4105 #[allow(clippy::type_complexity)]
4106 pub fn selections_in_range(
4107 &self,
4108 range: Range<Anchor>,
4109 include_local: bool,
4110 ) -> impl Iterator<
4111 Item = (
4112 ReplicaId,
4113 bool,
4114 CursorShape,
4115 impl Iterator<Item = &Selection<Anchor>> + '_,
4116 ),
4117 > + '_ {
4118 self.remote_selections
4119 .iter()
4120 .filter(move |(replica_id, set)| {
4121 (include_local || **replica_id != self.text.replica_id())
4122 && !set.selections.is_empty()
4123 })
4124 .map(move |(replica_id, set)| {
4125 let start_ix = match set.selections.binary_search_by(|probe| {
4126 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4127 }) {
4128 Ok(ix) | Err(ix) => ix,
4129 };
4130 let end_ix = match set.selections.binary_search_by(|probe| {
4131 probe.start.cmp(&range.end, self).then(Ordering::Less)
4132 }) {
4133 Ok(ix) | Err(ix) => ix,
4134 };
4135
4136 (
4137 *replica_id,
4138 set.line_mode,
4139 set.cursor_shape,
4140 set.selections[start_ix..end_ix].iter(),
4141 )
4142 })
4143 }
4144
    /// Returns whether the buffer contains any diagnostics.
4146 pub fn has_diagnostics(&self) -> bool {
4147 !self.diagnostics.is_empty()
4148 }
4149
4150 /// Returns all the diagnostics intersecting the given range.
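    ///
    /// A usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     // Entries arrive ordered by start position, then severity, then group id.
    ///     let DiagnosticEntry { range, diagnostic } = entry;
    /// }
    /// ```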
4151 pub fn diagnostics_in_range<'a, T, O>(
4152 &'a self,
4153 search_range: Range<T>,
4154 reversed: bool,
4155 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4156 where
4157 T: 'a + Clone + ToOffset,
4158 O: 'a + FromAnchor,
4159 {
4160 let mut iterators: Vec<_> = self
4161 .diagnostics
4162 .iter()
4163 .map(|(_, collection)| {
4164 collection
4165 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4166 .peekable()
4167 })
4168 .collect();
4169
4170 std::iter::from_fn(move || {
4171 let (next_ix, _) = iterators
4172 .iter_mut()
4173 .enumerate()
4174 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4175 .min_by(|(_, a), (_, b)| {
4176 let cmp = a
4177 .range
4178 .start
4179 .cmp(&b.range.start, self)
4180 // when range is equal, sort by diagnostic severity
4181 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4182 // and stabilize order with group_id
4183 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4184 if reversed { cmp.reverse() } else { cmp }
4185 })?;
4186 iterators[next_ix]
4187 .next()
4188 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4189 diagnostic,
4190 range: FromAnchor::from_anchor(&range.start, self)
4191 ..FromAnchor::from_anchor(&range.end, self),
4192 })
4193 })
4194 }
4195
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4199 pub fn diagnostic_groups(
4200 &self,
4201 language_server_id: Option<LanguageServerId>,
4202 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4203 let mut groups = Vec::new();
4204
4205 if let Some(language_server_id) = language_server_id {
4206 if let Ok(ix) = self
4207 .diagnostics
4208 .binary_search_by_key(&language_server_id, |e| e.0)
4209 {
4210 self.diagnostics[ix]
4211 .1
4212 .groups(language_server_id, &mut groups, self);
4213 }
4214 } else {
4215 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4216 diagnostics.groups(*language_server_id, &mut groups, self);
4217 }
4218 }
4219
4220 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4221 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4222 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4223 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4224 });
4225
4226 groups
4227 }
4228
4229 /// Returns an iterator over the diagnostics for the given group.
4230 pub fn diagnostic_group<O>(
4231 &self,
4232 group_id: usize,
4233 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4234 where
4235 O: FromAnchor + 'static,
4236 {
4237 self.diagnostics
4238 .iter()
4239 .flat_map(move |(_, set)| set.group(group_id, self))
4240 }
4241
4242 /// An integer version number that accounts for all updates besides
4243 /// the buffer's text itself (which is versioned via a version vector).
4244 pub fn non_text_state_update_count(&self) -> usize {
4245 self.non_text_state_update_count
4246 }
4247
    /// Returns a snapshot of the underlying file.
4249 pub fn file(&self) -> Option<&Arc<dyn File>> {
4250 self.file.as_ref()
4251 }
4252
4253 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4254 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4255 if let Some(file) = self.file() {
4256 if file.path().file_name().is_none() || include_root {
4257 Some(file.full_path(cx))
4258 } else {
4259 Some(file.path().to_path_buf())
4260 }
4261 } else {
4262 None
4263 }
4264 }
4265
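    /// Returns the words in the given range, keyed by their text, optionally filtered
    /// by a fuzzy query (see [`WordsQuery`]).
    ///
    /// A usage sketch (not a doctest; `snapshot` is assumed):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, anchor_range) in words {
    ///     // `anchor_range` covers the word's extent in the buffer.
    /// }
    /// ```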
4266 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4267 let query_str = query.fuzzy_contents;
4268 if query_str.map_or(false, |query| query.is_empty()) {
4269 return BTreeMap::default();
4270 }
4271
4272 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4273 language,
4274 override_id: None,
4275 }));
4276
4277 let mut query_ix = 0;
4278 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4279 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4280
4281 let mut words = BTreeMap::default();
4282 let mut current_word_start_ix = None;
4283 let mut chunk_ix = query.range.start;
4284 for chunk in self.chunks(query.range, false) {
4285 for (i, c) in chunk.text.char_indices() {
4286 let ix = chunk_ix + i;
4287 if classifier.is_word(c) {
4288 if current_word_start_ix.is_none() {
4289 current_word_start_ix = Some(ix);
4290 }
4291
4292 if let Some(query_chars) = &query_chars {
4293 if query_ix < query_len {
4294 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4295 query_ix += 1;
4296 }
4297 }
4298 }
4299 continue;
4300 } else if let Some(word_start) = current_word_start_ix.take() {
4301 if query_ix == query_len {
4302 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4303 let mut word_text = self.text_for_range(word_start..ix).peekable();
4304 let first_char = word_text
4305 .peek()
4306 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip "words"
                        // that start with a digit.
4308 if !query.skip_digits
4309 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4310 {
4311 words.insert(word_text.collect(), word_range);
4312 }
4313 }
4314 }
4315 query_ix = 0;
4316 }
4317 chunk_ix += chunk.text.len();
4318 }
4319
4320 words
4321 }
4322}
4323
4324pub struct WordsQuery<'a> {
    /// Only return words that contain all of the characters of this fuzzy string,
    /// in order (matched case-insensitively).
4326 pub fuzzy_contents: Option<&'a str>,
4327 /// Skips words that start with a digit.
4328 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4330 pub range: Range<usize>,
4331}
4332
4333fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4334 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4335}
4336
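/// A sketch of the expected behavior (illustrative, not a doctest):
///
/// ```ignore
/// let indent = indent_size_for_text("    let x = 1;".chars());
/// assert_eq!(indent.len, 4); // four leading spaces
/// // indent.kind == IndentKind::Space
/// ```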
4337fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4338 let mut result = IndentSize::spaces(0);
4339 for c in text {
4340 let kind = match c {
4341 ' ' => IndentKind::Space,
4342 '\t' => IndentKind::Tab,
4343 _ => break,
4344 };
4345 if result.len == 0 {
4346 result.kind = kind;
4347 }
4348 result.len += 1;
4349 }
4350 result
4351}
4352
4353impl Clone for BufferSnapshot {
4354 fn clone(&self) -> Self {
4355 Self {
4356 text: self.text.clone(),
4357 syntax: self.syntax.clone(),
4358 file: self.file.clone(),
4359 remote_selections: self.remote_selections.clone(),
4360 diagnostics: self.diagnostics.clone(),
4361 language: self.language.clone(),
4362 non_text_state_update_count: self.non_text_state_update_count,
4363 }
4364 }
4365}
4366
4367impl Deref for BufferSnapshot {
4368 type Target = text::BufferSnapshot;
4369
4370 fn deref(&self) -> &Self::Target {
4371 &self.text
4372 }
4373}
4374
4375unsafe impl Send for BufferChunks<'_> {}
4376
4377impl<'a> BufferChunks<'a> {
4378 pub(crate) fn new(
4379 text: &'a Rope,
4380 range: Range<usize>,
4381 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4382 diagnostics: bool,
4383 buffer_snapshot: Option<&'a BufferSnapshot>,
4384 ) -> Self {
4385 let mut highlights = None;
4386 if let Some((captures, highlight_maps)) = syntax {
4387 highlights = Some(BufferChunkHighlights {
4388 captures,
4389 next_capture: None,
4390 stack: Default::default(),
4391 highlight_maps,
4392 })
4393 }
4394
4395 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4396 let chunks = text.chunks_in_range(range.clone());
4397
4398 let mut this = BufferChunks {
4399 range,
4400 buffer_snapshot,
4401 chunks,
4402 diagnostic_endpoints,
4403 error_depth: 0,
4404 warning_depth: 0,
4405 information_depth: 0,
4406 hint_depth: 0,
4407 unnecessary_depth: 0,
4408 underline: true,
4409 highlights,
4410 };
4411 this.initialize_diagnostic_endpoints();
4412 this
4413 }
4414
    /// Seeks to the given byte range in the buffer.
4416 pub fn seek(&mut self, range: Range<usize>) {
4417 let old_range = std::mem::replace(&mut self.range, range.clone());
4418 self.chunks.set_range(self.range.clone());
4419 if let Some(highlights) = self.highlights.as_mut() {
4420 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4421 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4422 highlights
4423 .stack
4424 .retain(|(end_offset, _)| *end_offset > range.start);
4425 if let Some(capture) = &highlights.next_capture {
4426 if range.start >= capture.node.start_byte() {
4427 let next_capture_end = capture.node.end_byte();
4428 if range.start < next_capture_end {
4429 highlights.stack.push((
4430 next_capture_end,
4431 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4432 ));
4433 }
4434 highlights.next_capture.take();
4435 }
4436 }
4437 } else if let Some(snapshot) = self.buffer_snapshot {
4438 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4439 *highlights = BufferChunkHighlights {
4440 captures,
4441 next_capture: None,
4442 stack: Default::default(),
4443 highlight_maps,
4444 };
4445 } else {
4446 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4447 // Seeking such BufferChunks is not supported.
4448 debug_assert!(
4449 false,
4450 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4451 );
4452 }
4453
4454 highlights.captures.set_byte_range(self.range.clone());
4455 self.initialize_diagnostic_endpoints();
4456 }
4457 }
4458
4459 fn initialize_diagnostic_endpoints(&mut self) {
4460 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4461 if let Some(buffer) = self.buffer_snapshot {
4462 let mut diagnostic_endpoints = Vec::new();
4463 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4464 diagnostic_endpoints.push(DiagnosticEndpoint {
4465 offset: entry.range.start,
4466 is_start: true,
4467 severity: entry.diagnostic.severity,
4468 is_unnecessary: entry.diagnostic.is_unnecessary,
4469 underline: entry.diagnostic.underline,
4470 });
4471 diagnostic_endpoints.push(DiagnosticEndpoint {
4472 offset: entry.range.end,
4473 is_start: false,
4474 severity: entry.diagnostic.severity,
4475 is_unnecessary: entry.diagnostic.is_unnecessary,
4476 underline: entry.diagnostic.underline,
4477 });
4478 }
4479 diagnostic_endpoints
4480 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4481 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4482 self.hint_depth = 0;
4483 self.error_depth = 0;
4484 self.warning_depth = 0;
4485 self.information_depth = 0;
4486 }
4487 }
4488 }
4489
4490 /// The current byte offset in the buffer.
4491 pub fn offset(&self) -> usize {
4492 self.range.start
4493 }
4494
4495 pub fn range(&self) -> Range<usize> {
4496 self.range.clone()
4497 }
4498
4499 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4500 let depth = match endpoint.severity {
4501 DiagnosticSeverity::ERROR => &mut self.error_depth,
4502 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4503 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4504 DiagnosticSeverity::HINT => &mut self.hint_depth,
4505 _ => return,
4506 };
4507 if endpoint.is_start {
4508 *depth += 1;
4509 } else {
4510 *depth -= 1;
4511 }
4512
4513 if endpoint.is_unnecessary {
4514 if endpoint.is_start {
4515 self.unnecessary_depth += 1;
4516 } else {
4517 self.unnecessary_depth -= 1;
4518 }
4519 }
4520 }
4521
4522 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4523 if self.error_depth > 0 {
4524 Some(DiagnosticSeverity::ERROR)
4525 } else if self.warning_depth > 0 {
4526 Some(DiagnosticSeverity::WARNING)
4527 } else if self.information_depth > 0 {
4528 Some(DiagnosticSeverity::INFORMATION)
4529 } else if self.hint_depth > 0 {
4530 Some(DiagnosticSeverity::HINT)
4531 } else {
4532 None
4533 }
4534 }
4535
4536 fn current_code_is_unnecessary(&self) -> bool {
4537 self.unnecessary_depth > 0
4538 }
4539}
4540
4541impl<'a> Iterator for BufferChunks<'a> {
4542 type Item = Chunk<'a>;
4543
4544 fn next(&mut self) -> Option<Self::Item> {
4545 let mut next_capture_start = usize::MAX;
4546 let mut next_diagnostic_endpoint = usize::MAX;
4547
4548 if let Some(highlights) = self.highlights.as_mut() {
4549 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4550 if *parent_capture_end <= self.range.start {
4551 highlights.stack.pop();
4552 } else {
4553 break;
4554 }
4555 }
4556
4557 if highlights.next_capture.is_none() {
4558 highlights.next_capture = highlights.captures.next();
4559 }
4560
4561 while let Some(capture) = highlights.next_capture.as_ref() {
4562 if self.range.start < capture.node.start_byte() {
4563 next_capture_start = capture.node.start_byte();
4564 break;
4565 } else {
4566 let highlight_id =
4567 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4568 highlights
4569 .stack
4570 .push((capture.node.end_byte(), highlight_id));
4571 highlights.next_capture = highlights.captures.next();
4572 }
4573 }
4574 }
4575
4576 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4577 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4578 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4579 if endpoint.offset <= self.range.start {
4580 self.update_diagnostic_depths(endpoint);
4581 diagnostic_endpoints.next();
4582 self.underline = endpoint.underline;
4583 } else {
4584 next_diagnostic_endpoint = endpoint.offset;
4585 break;
4586 }
4587 }
4588 }
4589 self.diagnostic_endpoints = diagnostic_endpoints;
4590
4591 if let Some(chunk) = self.chunks.peek() {
4592 let chunk_start = self.range.start;
4593 let mut chunk_end = (self.chunks.offset() + chunk.len())
4594 .min(next_capture_start)
4595 .min(next_diagnostic_endpoint);
4596 let mut highlight_id = None;
4597 if let Some(highlights) = self.highlights.as_ref() {
4598 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4599 chunk_end = chunk_end.min(*parent_capture_end);
4600 highlight_id = Some(*parent_highlight_id);
4601 }
4602 }
4603
4604 let slice =
4605 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4606 self.range.start = chunk_end;
4607 if self.range.start == self.chunks.offset() + chunk.len() {
4608 self.chunks.next().unwrap();
4609 }
4610
4611 Some(Chunk {
4612 text: slice,
4613 syntax_highlight_id: highlight_id,
4614 underline: self.underline,
4615 diagnostic_severity: self.current_diagnostic_severity(),
4616 is_unnecessary: self.current_code_is_unnecessary(),
4617 ..Chunk::default()
4618 })
4619 } else {
4620 None
4621 }
4622 }
4623}
4624
4625impl operation_queue::Operation for Operation {
4626 fn lamport_timestamp(&self) -> clock::Lamport {
4627 match self {
4628 Operation::Buffer(_) => {
4629 unreachable!("buffer operations should never be deferred at this layer")
4630 }
4631 Operation::UpdateDiagnostics {
4632 lamport_timestamp, ..
4633 }
4634 | Operation::UpdateSelections {
4635 lamport_timestamp, ..
4636 }
4637 | Operation::UpdateCompletionTriggers {
4638 lamport_timestamp, ..
4639 } => *lamport_timestamp,
4640 }
4641 }
4642}
4643
4644impl Default for Diagnostic {
4645 fn default() -> Self {
4646 Self {
4647 source: Default::default(),
4648 source_kind: DiagnosticSourceKind::Other,
4649 code: None,
4650 code_description: None,
4651 severity: DiagnosticSeverity::ERROR,
4652 message: Default::default(),
4653 markdown: None,
4654 group_id: 0,
4655 is_primary: false,
4656 is_disk_based: false,
4657 is_unnecessary: false,
4658 underline: true,
4659 data: None,
4660 }
4661 }
4662}
4663
4664impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4666 pub fn spaces(len: u32) -> Self {
4667 Self {
4668 len,
4669 kind: IndentKind::Space,
4670 }
4671 }
4672
4673 /// Returns an [`IndentSize`] representing a tab.
4674 pub fn tab() -> Self {
4675 Self {
4676 len: 1,
4677 kind: IndentKind::Tab,
4678 }
4679 }
4680
4681 /// An iterator over the characters represented by this [`IndentSize`].
4682 pub fn chars(&self) -> impl Iterator<Item = char> {
4683 iter::repeat(self.char()).take(self.len as usize)
4684 }
4685
4686 /// The character representation of this [`IndentSize`].
4687 pub fn char(&self) -> char {
4688 match self.kind {
4689 IndentKind::Space => ' ',
4690 IndentKind::Tab => '\t',
4691 }
4692 }
4693
4694 /// Consumes the current [`IndentSize`] and returns a new one that has
4695 /// been shrunk or enlarged by the given size along the given direction.
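    ///
    /// A sketch of the expected behavior (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    /// let indent = indent.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 4);
    /// ```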
4696 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4697 match direction {
4698 Ordering::Less => {
4699 if self.kind == size.kind && self.len >= size.len {
4700 self.len -= size.len;
4701 }
4702 }
4703 Ordering::Equal => {}
4704 Ordering::Greater => {
4705 if self.len == 0 {
4706 self = size;
4707 } else if self.kind == size.kind {
4708 self.len += size.len;
4709 }
4710 }
4711 }
4712 self
4713 }
4714
4715 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4716 match self.kind {
4717 IndentKind::Space => self.len as usize,
4718 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4719 }
4720 }
4721}
4722
4723#[cfg(any(test, feature = "test-support"))]
4724pub struct TestFile {
4725 pub path: Arc<Path>,
4726 pub root_name: String,
4727 pub local_root: Option<PathBuf>,
4728}
4729
4730#[cfg(any(test, feature = "test-support"))]
4731impl File for TestFile {
4732 fn path(&self) -> &Arc<Path> {
4733 &self.path
4734 }
4735
4736 fn full_path(&self, _: &gpui::App) -> PathBuf {
4737 PathBuf::from(&self.root_name).join(self.path.as_ref())
4738 }
4739
4740 fn as_local(&self) -> Option<&dyn LocalFile> {
4741 if self.local_root.is_some() {
4742 Some(self)
4743 } else {
4744 None
4745 }
4746 }
4747
4748 fn disk_state(&self) -> DiskState {
4749 unimplemented!()
4750 }
4751
4752 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4753 self.path().file_name().unwrap_or(self.root_name.as_ref())
4754 }
4755
4756 fn worktree_id(&self, _: &App) -> WorktreeId {
4757 WorktreeId::from_usize(0)
4758 }
4759
4760 fn to_proto(&self, _: &App) -> rpc::proto::File {
4761 unimplemented!()
4762 }
4763
4764 fn is_private(&self) -> bool {
4765 false
4766 }
4767}
4768
4769#[cfg(any(test, feature = "test-support"))]
4770impl LocalFile for TestFile {
4771 fn abs_path(&self, _cx: &App) -> PathBuf {
4772 PathBuf::from(self.local_root.as_ref().unwrap())
4773 .join(&self.root_name)
4774 .join(self.path.as_ref())
4775 }
4776
4777 fn load(&self, _cx: &App) -> Task<Result<String>> {
4778 unimplemented!()
4779 }
4780
4781 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4782 unimplemented!()
4783 }
4784}
4785
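/// Groups consecutive values into contiguous ranges, starting a new range whenever a
/// gap is encountered or a range reaches `max_len`.
///
/// A sketch of the expected behavior (illustrative, not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```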
4786pub(crate) fn contiguous_ranges(
4787 values: impl Iterator<Item = u32>,
4788 max_len: usize,
4789) -> impl Iterator<Item = Range<u32>> {
4790 let mut values = values;
4791 let mut current_range: Option<Range<u32>> = None;
4792 std::iter::from_fn(move || {
4793 loop {
4794 if let Some(value) = values.next() {
4795 if let Some(range) = &mut current_range {
4796 if value == range.end && range.len() < max_len {
4797 range.end += 1;
4798 continue;
4799 }
4800 }
4801
4802 let prev_range = current_range.clone();
4803 current_range = Some(value..(value + 1));
4804 if prev_range.is_some() {
4805 return prev_range;
4806 }
4807 } else {
4808 return current_range.take();
4809 }
4810 }
4811 })
4812}
4813
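/// Classifies characters as word, whitespace, or punctuation characters, honoring the
/// word-character overrides of the given language scope when one is provided.
///
/// A usage sketch (illustrative, not a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.kind('x') == CharKind::Word);
/// assert!(classifier.kind(' ') == CharKind::Whitespace);
/// assert!(classifier.kind('.') == CharKind::Punctuation);
/// ```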
4814#[derive(Default, Debug)]
4815pub struct CharClassifier {
4816 scope: Option<LanguageScope>,
4817 for_completion: bool,
4818 ignore_punctuation: bool,
4819}
4820
4821impl CharClassifier {
4822 pub fn new(scope: Option<LanguageScope>) -> Self {
4823 Self {
4824 scope,
4825 for_completion: false,
4826 ignore_punctuation: false,
4827 }
4828 }
4829
4830 pub fn for_completion(self, for_completion: bool) -> Self {
4831 Self {
4832 for_completion,
4833 ..self
4834 }
4835 }
4836
4837 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4838 Self {
4839 ignore_punctuation,
4840 ..self
4841 }
4842 }
4843
4844 pub fn is_whitespace(&self, c: char) -> bool {
4845 self.kind(c) == CharKind::Whitespace
4846 }
4847
4848 pub fn is_word(&self, c: char) -> bool {
4849 self.kind(c) == CharKind::Word
4850 }
4851
4852 pub fn is_punctuation(&self, c: char) -> bool {
4853 self.kind(c) == CharKind::Punctuation
4854 }
4855
4856 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4857 if c.is_alphanumeric() || c == '_' {
4858 return CharKind::Word;
4859 }
4860
4861 if let Some(scope) = &self.scope {
4862 let characters = if self.for_completion {
4863 scope.completion_query_characters()
4864 } else {
4865 scope.word_characters()
4866 };
4867 if let Some(characters) = characters {
4868 if characters.contains(&c) {
4869 return CharKind::Word;
4870 }
4871 }
4872 }
4873
4874 if c.is_whitespace() {
4875 return CharKind::Whitespace;
4876 }
4877
4878 if ignore_punctuation {
4879 CharKind::Word
4880 } else {
4881 CharKind::Punctuation
4882 }
4883 }
4884
4885 pub fn kind(&self, c: char) -> CharKind {
4886 self.kind_with(c, self.ignore_punctuation)
4887 }
4888}
4889
4890/// Find all of the ranges of whitespace that occur at the ends of lines
4891/// in the given rope.
4892///
4893/// This could also be done with a regex search, but this implementation
4894/// avoids copying text.
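///
/// A sketch of the expected output (illustrative, not a doctest; assumes `Rope`
/// can be built from a `&str`):
///
/// ```ignore
/// let rope = Rope::from("fn main() {  \n}\t\n");
/// // One range per line that ends in spaces or tabs: [11..13, 15..16]
/// let ranges = trailing_whitespace_ranges(&rope);
/// ```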
4895pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4896 let mut ranges = Vec::new();
4897
4898 let mut offset = 0;
4899 let mut prev_chunk_trailing_whitespace_range = 0..0;
4900 for chunk in rope.chunks() {
4901 let mut prev_line_trailing_whitespace_range = 0..0;
4902 for (i, line) in chunk.split('\n').enumerate() {
4903 let line_end_offset = offset + line.len();
4904 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4905 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4906
4907 if i == 0 && trimmed_line_len == 0 {
4908 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4909 }
4910 if !prev_line_trailing_whitespace_range.is_empty() {
4911 ranges.push(prev_line_trailing_whitespace_range);
4912 }
4913
4914 offset = line_end_offset + 1;
4915 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4916 }
4917
4918 offset -= 1;
4919 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4920 }
4921
4922 if !prev_chunk_trailing_whitespace_range.is_empty() {
4923 ranges.push(prev_chunk_trailing_whitespace_range);
4924 }
4925
4926 ranges
4927}