1use crate::{
2 DebugVariableCapture, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76#[derive(Debug)]
77pub struct DebugVariableRanges {
78 pub buffer_id: BufferId,
79 pub range: Range<usize>,
80}
81
82/// A label for the background task spawned by the buffer to compute
83/// a diff against the contents of its file.
84pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
85
/// Indicates whether a [`Buffer`] has permission to be edited.
87#[derive(PartialEq, Clone, Copy, Debug)]
88pub enum Capability {
89 /// The buffer is a mutable replica.
90 ReadWrite,
91 /// The buffer is a read-only replica.
92 ReadOnly,
93}
94
/// A row (line) index within a buffer.
pub type BufferRow = u32;
96
97/// An in-memory representation of a source code file, including its text,
98/// syntax trees, git status, and diagnostics.
99pub struct Buffer {
100 text: TextBuffer,
101 branch_state: Option<BufferBranchState>,
102 /// Filesystem state, `None` when there is no path.
103 file: Option<Arc<dyn File>>,
104 /// The mtime of the file when this buffer was last loaded from
105 /// or saved to disk.
106 saved_mtime: Option<MTime>,
107 /// The version vector when this buffer was last loaded from
108 /// or saved to disk.
109 saved_version: clock::Global,
110 preview_version: clock::Global,
111 transaction_depth: usize,
112 was_dirty_before_starting_transaction: Option<bool>,
113 reload_task: Option<Task<Result<()>>>,
114 language: Option<Arc<Language>>,
115 autoindent_requests: Vec<Arc<AutoindentRequest>>,
116 pending_autoindent: Option<Task<()>>,
117 sync_parse_timeout: Duration,
118 syntax_map: Mutex<SyntaxMap>,
119 reparse: Option<Task<()>>,
120 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
121 non_text_state_update_count: usize,
122 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
123 remote_selections: TreeMap<ReplicaId, SelectionSet>,
124 diagnostics_timestamp: clock::Lamport,
125 completion_triggers: BTreeSet<String>,
126 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
127 completion_triggers_timestamp: clock::Lamport,
128 deferred_ops: OperationQueue<Operation>,
129 capability: Capability,
130 has_conflict: bool,
/// Memoizes calls to `has_changes_since(saved_version)`.
/// The cell holds `(self.version, has_changes)` as of the most recent call.
133 has_unsaved_edits: Cell<(clock::Global, bool)>,
134 change_bits: Vec<rc::Weak<Cell<bool>>>,
135 _subscriptions: Vec<gpui::Subscription>,
136}
137
138#[derive(Copy, Clone, Debug, PartialEq, Eq)]
139pub enum ParseStatus {
140 Idle,
141 Parsing,
142}
143
144struct BufferBranchState {
145 base_buffer: Entity<Buffer>,
146 merged_operations: Vec<Lamport>,
147}
148
149/// An immutable, cheaply cloneable representation of a fixed
150/// state of a buffer.
151pub struct BufferSnapshot {
152 pub text: text::BufferSnapshot,
153 pub(crate) syntax: SyntaxSnapshot,
154 file: Option<Arc<dyn File>>,
155 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
156 remote_selections: TreeMap<ReplicaId, SelectionSet>,
157 language: Option<Arc<Language>>,
158 non_text_state_update_count: usize,
159}
160
161/// The kind and amount of indentation in a particular line. For now,
162/// assumes that indentation is all the same character.
163#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
164pub struct IndentSize {
165 /// The number of bytes that comprise the indentation.
166 pub len: u32,
167 /// The kind of whitespace used for indentation.
168 pub kind: IndentKind,
169}
170
171/// A whitespace character that's used for indentation.
172#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
173pub enum IndentKind {
174 /// An ASCII space character.
175 #[default]
176 Space,
177 /// An ASCII tab character.
178 Tab,
179}
180
181/// The shape of a selection cursor.
182#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
183#[serde(rename_all = "snake_case")]
184pub enum CursorShape {
185 /// A vertical bar
186 #[default]
187 Bar,
188 /// A block that surrounds the following character
189 Block,
190 /// An underline that runs along the following character
191 Underline,
192 /// A box drawn around the following character
193 Hollow,
194}
195
196#[derive(Clone, Debug)]
197struct SelectionSet {
198 line_mode: bool,
199 cursor_shape: CursorShape,
200 selections: Arc<[Selection<Anchor>]>,
201 lamport_timestamp: clock::Lamport,
202}
203
204/// A diagnostic associated with a certain range of a buffer.
205#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
206pub struct Diagnostic {
207 /// The name of the service that produced this diagnostic.
208 pub source: Option<String>,
209 /// A machine-readable code that identifies this diagnostic.
210 pub code: Option<NumberOrString>,
/// A link to documentation describing this diagnostic's code, if provided by the language server.
pub code_description: Option<lsp::Url>,
212 /// Whether this diagnostic is a hint, warning, or error.
213 pub severity: DiagnosticSeverity,
214 /// The human-readable message associated with this diagnostic.
215 pub message: String,
/// The human-readable message, in Markdown format, if available.
217 pub markdown: Option<String>,
218 /// An id that identifies the group to which this diagnostic belongs.
219 ///
220 /// When a language server produces a diagnostic with
221 /// one or more associated diagnostics, those diagnostics are all
222 /// assigned a single group ID.
223 pub group_id: usize,
224 /// Whether this diagnostic is the primary diagnostic for its group.
225 ///
226 /// In a given group, the primary diagnostic is the top-level diagnostic
227 /// returned by the language server. The non-primary diagnostics are the
228 /// associated diagnostics.
229 pub is_primary: bool,
230 /// Whether this diagnostic is considered to originate from an analysis of
231 /// files on disk, as opposed to any unsaved buffer contents. This is a
232 /// property of a given diagnostic source, and is configured for a given
233 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
234 /// for the language server.
235 pub is_disk_based: bool,
236 /// Whether this diagnostic marks unnecessary code.
237 pub is_unnecessary: bool,
/// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
239 pub data: Option<Value>,
240}
241
242/// An operation used to synchronize this buffer with its other replicas.
243#[derive(Clone, Debug, PartialEq)]
244pub enum Operation {
245 /// A text operation.
246 Buffer(text::Operation),
247
248 /// An update to the buffer's diagnostics.
249 UpdateDiagnostics {
250 /// The id of the language server that produced the new diagnostics.
251 server_id: LanguageServerId,
252 /// The diagnostics.
253 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
254 /// The buffer's lamport timestamp.
255 lamport_timestamp: clock::Lamport,
256 },
257
258 /// An update to the most recent selections in this buffer.
259 UpdateSelections {
260 /// The selections.
261 selections: Arc<[Selection<Anchor>]>,
262 /// The buffer's lamport timestamp.
263 lamport_timestamp: clock::Lamport,
264 /// Whether the selections are in 'line mode'.
265 line_mode: bool,
266 /// The [`CursorShape`] associated with these selections.
267 cursor_shape: CursorShape,
268 },
269
270 /// An update to the characters that should trigger autocompletion
271 /// for this buffer.
272 UpdateCompletionTriggers {
273 /// The characters that trigger autocompletion.
274 triggers: Vec<String>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// The language server ID.
278 server_id: LanguageServerId,
279 },
280}
281
282/// An event that occurs in a buffer.
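///
/// A hedged sketch (not compiled as a doctest) of observing these events from
/// another entity; `buffer` is assumed to be an `Entity<Buffer>` and `cx` a
/// `Context` for the observing entity:
///
/// ```ignore
/// cx.subscribe(&buffer, |_this, _buffer, event, _cx| {
///     if let BufferEvent::Edited = event {
///         // React to edits here.
///     }
/// })
/// .detach();
/// ```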
283#[derive(Clone, Debug, PartialEq)]
284pub enum BufferEvent {
285 /// The buffer was changed in a way that must be
286 /// propagated to its other replicas.
287 Operation {
288 operation: Operation,
289 is_local: bool,
290 },
291 /// The buffer was edited.
292 Edited,
293 /// The buffer's `dirty` bit changed.
294 DirtyChanged,
295 /// The buffer was saved.
296 Saved,
297 /// The buffer's file was changed on disk.
298 FileHandleChanged,
299 /// The buffer was reloaded.
300 Reloaded,
/// The buffer needs to be reloaded.
302 ReloadNeeded,
303 /// The buffer's language was changed.
304 LanguageChanged,
305 /// The buffer's syntax trees were updated.
306 Reparsed,
307 /// The buffer's diagnostics were updated.
308 DiagnosticsUpdated,
309 /// The buffer gained or lost editing capabilities.
310 CapabilityChanged,
311 /// The buffer was explicitly requested to close.
312 Closed,
313 /// The buffer was discarded when closing.
314 Discarded,
315}
316
317/// The file associated with a buffer.
318pub trait File: Send + Sync + Any {
319 /// Returns the [`LocalFile`] associated with this file, if the
320 /// file is local.
321 fn as_local(&self) -> Option<&dyn LocalFile>;
322
323 /// Returns whether this file is local.
324 fn is_local(&self) -> bool {
325 self.as_local().is_some()
326 }
327
328 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
329 /// only available in some states, such as modification time.
330 fn disk_state(&self) -> DiskState;
331
332 /// Returns the path of this file relative to the worktree's root directory.
333 fn path(&self) -> &Arc<Path>;
334
335 /// Returns the path of this file relative to the worktree's parent directory (this means it
336 /// includes the name of the worktree's root folder).
337 fn full_path(&self, cx: &App) -> PathBuf;
338
339 /// Returns the last component of this handle's absolute path. If this handle refers to the root
340 /// of its worktree, then this method will return the name of the worktree itself.
341 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
342
343 /// Returns the id of the worktree to which this file belongs.
344 ///
345 /// This is needed for looking up project-specific settings.
346 fn worktree_id(&self, cx: &App) -> WorktreeId;
347
348 /// Converts this file into a protobuf message.
349 fn to_proto(&self, cx: &App) -> rpc::proto::File;
350
351 /// Return whether Zed considers this to be a private file.
352 fn is_private(&self) -> bool;
353}
354
355/// The file's storage status - whether it's stored (`Present`), and if so when it was last
356/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
357/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
358/// indicator for new files.
359#[derive(Copy, Clone, Debug, PartialEq)]
360pub enum DiskState {
361 /// File created in Zed that has not been saved.
362 New,
363 /// File present on the filesystem.
364 Present { mtime: MTime },
365 /// Deleted file that was previously present.
366 Deleted,
367}
368
369impl DiskState {
370 /// Returns the file's last known modification time on disk.
371 pub fn mtime(self) -> Option<MTime> {
372 match self {
373 DiskState::New => None,
374 DiskState::Present { mtime } => Some(mtime),
375 DiskState::Deleted => None,
376 }
377 }
378
/// Returns whether the file currently exists on disk.
pub fn exists(&self) -> bool {
380 match self {
381 DiskState::New => false,
382 DiskState::Present { .. } => true,
383 DiskState::Deleted => false,
384 }
385 }
386}
387
388/// The file associated with a buffer, in the case where the file is on the local disk.
389pub trait LocalFile: File {
/// Returns the absolute path of this file.
391 fn abs_path(&self, cx: &App) -> PathBuf;
392
393 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
394 fn load(&self, cx: &App) -> Task<Result<String>>;
395
396 /// Loads the file's contents from disk.
397 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
398}
399
400/// The auto-indent behavior associated with an editing operation.
401/// For some editing operations, each affected line of text has its
402/// indentation recomputed. For other operations, the entire block
403/// of edited text is adjusted uniformly.
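///
/// A hedged sketch (not compiled as a doctest) of requesting block-mode
/// auto-indent when pasting copied text; `buffer` is assumed to be a
/// `&mut Buffer` inside an update closure, and `insert_range`, `copied_text`,
/// and the original indent column of 4 are illustrative assumptions:
///
/// ```ignore
/// buffer.edit(
///     [(insert_range, copied_text)],
///     Some(AutoindentMode::Block {
///         // The column at which the first copied line originally started.
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```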
404#[derive(Clone, Debug)]
405pub enum AutoindentMode {
406 /// Indent each line of inserted text.
407 EachLine,
408 /// Apply the same indentation adjustment to all of the lines
409 /// in a given insertion.
410 Block {
411 /// The original indentation column of the first line of each
412 /// insertion, if it has been copied.
413 ///
414 /// Knowing this makes it possible to preserve the relative indentation
415 /// of every line in the insertion from when it was copied.
416 ///
/// If the original indent column is `a`, and the first line of the
/// insertion is then auto-indented to column `b`, every other line of
/// the insertion will be shifted by that same delta of `b - a` columns.
420 original_indent_columns: Vec<Option<u32>>,
421 },
422}
423
424#[derive(Clone)]
425struct AutoindentRequest {
426 before_edit: BufferSnapshot,
427 entries: Vec<AutoindentRequestEntry>,
428 is_block_mode: bool,
429 ignore_empty_lines: bool,
430}
431
432#[derive(Debug, Clone)]
433struct AutoindentRequestEntry {
434 /// A range of the buffer whose indentation should be adjusted.
435 range: Range<Anchor>,
436 /// Whether or not these lines should be considered brand new, for the
437 /// purpose of auto-indent. When text is not new, its indentation will
438 /// only be adjusted if the suggested indentation level has *changed*
439 /// since the edit was made.
440 first_line_is_new: bool,
441 indent_size: IndentSize,
442 original_indent_column: Option<u32>,
443}
444
445#[derive(Debug)]
446struct IndentSuggestion {
447 basis_row: u32,
448 delta: Ordering,
449 within_error: bool,
450}
451
452struct BufferChunkHighlights<'a> {
453 captures: SyntaxMapCaptures<'a>,
454 next_capture: Option<SyntaxMapCapture<'a>>,
455 stack: Vec<(usize, HighlightId)>,
456 highlight_maps: Vec<HighlightMap>,
457}
458
459/// An iterator that yields chunks of a buffer's text, along with their
460/// syntax highlights and diagnostic status.
461pub struct BufferChunks<'a> {
462 buffer_snapshot: Option<&'a BufferSnapshot>,
463 range: Range<usize>,
464 chunks: text::Chunks<'a>,
465 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
466 error_depth: usize,
467 warning_depth: usize,
468 information_depth: usize,
469 hint_depth: usize,
470 unnecessary_depth: usize,
471 highlights: Option<BufferChunkHighlights<'a>>,
472}
473
474/// A chunk of a buffer's text, along with its syntax highlight and
475/// diagnostic status.
476#[derive(Clone, Debug, Default)]
477pub struct Chunk<'a> {
478 /// The text of the chunk.
479 pub text: &'a str,
480 /// The syntax highlighting style of the chunk.
481 pub syntax_highlight_id: Option<HighlightId>,
482 /// The highlight style that has been applied to this chunk in
483 /// the editor.
484 pub highlight_style: Option<HighlightStyle>,
485 /// The severity of diagnostic associated with this chunk, if any.
486 pub diagnostic_severity: Option<DiagnosticSeverity>,
487 /// Whether this chunk of text is marked as unnecessary.
488 pub is_unnecessary: bool,
489 /// Whether this chunk of text was originally a tab character.
490 pub is_tab: bool,
491}
492
493/// A set of edits to a given version of a buffer, computed asynchronously.
494#[derive(Debug)]
495pub struct Diff {
496 pub base_version: clock::Global,
497 pub line_ending: LineEnding,
498 pub edits: Vec<(Range<usize>, Arc<str>)>,
499}
500
501#[derive(Clone, Copy)]
502pub(crate) struct DiagnosticEndpoint {
503 offset: usize,
504 is_start: bool,
505 severity: DiagnosticSeverity,
506 is_unnecessary: bool,
507}
508
509/// A class of characters, used for characterizing a run of text.
510#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
511pub enum CharKind {
512 /// Whitespace.
513 Whitespace,
514 /// Punctuation.
515 Punctuation,
516 /// Word.
517 Word,
518}
519
/// A runnable is the data about a buffer region that can be resolved into a task.
521pub struct Runnable {
522 pub tags: SmallVec<[RunnableTag; 1]>,
523 pub language: Arc<Language>,
524 pub buffer: BufferId,
525}
526
527#[derive(Default, Clone, Debug)]
528pub struct HighlightedText {
529 pub text: SharedString,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533#[derive(Default, Debug)]
534struct HighlightedTextBuilder {
535 pub text: String,
536 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
537}
538
539impl HighlightedText {
540 pub fn from_buffer_range<T: ToOffset>(
541 range: Range<T>,
542 snapshot: &text::BufferSnapshot,
543 syntax_snapshot: &SyntaxSnapshot,
544 override_style: Option<HighlightStyle>,
545 syntax_theme: &SyntaxTheme,
546 ) -> Self {
547 let mut highlighted_text = HighlightedTextBuilder::default();
548 highlighted_text.add_text_from_buffer_range(
549 range,
550 snapshot,
551 syntax_snapshot,
552 override_style,
553 syntax_theme,
554 );
555 highlighted_text.build()
556 }
557
558 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
559 gpui::StyledText::new(self.text.clone())
560 .with_default_highlights(default_style, self.highlights.iter().cloned())
561 }
562
/// Returns the first line, trimmed of leading whitespace unless a highlight
/// starts within that whitespace, along with a boolean indicating whether more lines follow.
565 pub fn first_line_preview(self) -> (Self, bool) {
566 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
567 let first_line = &self.text[..newline_ix];
568
569 // Trim leading whitespace, unless an edit starts prior to it.
570 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
571 if let Some((first_highlight_range, _)) = self.highlights.first() {
572 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
573 }
574
575 let preview_text = &first_line[preview_start_ix..];
576 let preview_highlights = self
577 .highlights
578 .into_iter()
579 .take_while(|(range, _)| range.start < newline_ix)
580 .filter_map(|(mut range, highlight)| {
581 range.start = range.start.saturating_sub(preview_start_ix);
582 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
583 if range.is_empty() {
584 None
585 } else {
586 Some((range, highlight))
587 }
588 });
589
590 let preview = Self {
591 text: SharedString::new(preview_text),
592 highlights: preview_highlights.collect(),
593 };
594
595 (preview, self.text.len() > newline_ix)
596 }
597}
598
599impl HighlightedTextBuilder {
600 pub fn build(self) -> HighlightedText {
601 HighlightedText {
602 text: self.text.into(),
603 highlights: self.highlights,
604 }
605 }
606
607 pub fn add_text_from_buffer_range<T: ToOffset>(
608 &mut self,
609 range: Range<T>,
610 snapshot: &text::BufferSnapshot,
611 syntax_snapshot: &SyntaxSnapshot,
612 override_style: Option<HighlightStyle>,
613 syntax_theme: &SyntaxTheme,
614 ) {
615 let range = range.to_offset(snapshot);
616 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
617 let start = self.text.len();
618 self.text.push_str(chunk.text);
619 let end = self.text.len();
620
621 if let Some(mut highlight_style) = chunk
622 .syntax_highlight_id
623 .and_then(|id| id.style(syntax_theme))
624 {
625 if let Some(override_style) = override_style {
626 highlight_style.highlight(override_style);
627 }
628 self.highlights.push((start..end, highlight_style));
629 } else if let Some(override_style) = override_style {
630 self.highlights.push((start..end, override_style));
631 }
632 }
633 }
634
635 fn highlighted_chunks<'a>(
636 range: Range<usize>,
637 snapshot: &'a text::BufferSnapshot,
638 syntax_snapshot: &'a SyntaxSnapshot,
639 ) -> BufferChunks<'a> {
640 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
641 grammar.highlights_query.as_ref()
642 });
643
644 let highlight_maps = captures
645 .grammars()
646 .iter()
647 .map(|grammar| grammar.highlight_map())
648 .collect();
649
650 BufferChunks::new(
651 snapshot.as_rope(),
652 range,
653 Some((captures, highlight_maps)),
654 false,
655 None,
656 )
657 }
658}
659
/// A precomputed preview of a set of edits, used to render highlighted text
/// showing how the buffer would look with those edits applied.
#[derive(Clone)]
661pub struct EditPreview {
662 old_snapshot: text::BufferSnapshot,
663 applied_edits_snapshot: text::BufferSnapshot,
664 syntax_snapshot: SyntaxSnapshot,
665}
666
667impl EditPreview {
668 pub fn highlight_edits(
669 &self,
670 current_snapshot: &BufferSnapshot,
671 edits: &[(Range<Anchor>, String)],
672 include_deletions: bool,
673 cx: &App,
674 ) -> HighlightedText {
675 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
676 return HighlightedText::default();
677 };
678
679 let mut highlighted_text = HighlightedTextBuilder::default();
680
681 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
682
683 let insertion_highlight_style = HighlightStyle {
684 background_color: Some(cx.theme().status().created_background),
685 ..Default::default()
686 };
687 let deletion_highlight_style = HighlightStyle {
688 background_color: Some(cx.theme().status().deleted_background),
689 ..Default::default()
690 };
691 let syntax_theme = cx.theme().syntax();
692
693 for (range, edit_text) in edits {
694 let edit_new_end_in_preview_snapshot = range
695 .end
696 .bias_right(&self.old_snapshot)
697 .to_offset(&self.applied_edits_snapshot);
698 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
699
700 let unchanged_range_in_preview_snapshot =
701 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
702 if !unchanged_range_in_preview_snapshot.is_empty() {
703 highlighted_text.add_text_from_buffer_range(
704 unchanged_range_in_preview_snapshot,
705 &self.applied_edits_snapshot,
706 &self.syntax_snapshot,
707 None,
708 &syntax_theme,
709 );
710 }
711
712 let range_in_current_snapshot = range.to_offset(current_snapshot);
713 if include_deletions && !range_in_current_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 range_in_current_snapshot,
&current_snapshot.text,
&current_snapshot.syntax,
718 Some(deletion_highlight_style),
719 &syntax_theme,
720 );
721 }
722
723 if !edit_text.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
726 &self.applied_edits_snapshot,
727 &self.syntax_snapshot,
728 Some(insertion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
734 }
735
736 highlighted_text.add_text_from_buffer_range(
737 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743
744 highlighted_text.build()
745 }
746
747 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
748 let (first, _) = edits.first()?;
749 let (last, _) = edits.last()?;
750
751 let start = first
752 .start
753 .bias_left(&self.old_snapshot)
754 .to_point(&self.applied_edits_snapshot);
755 let end = last
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_point(&self.applied_edits_snapshot);
759
760 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
761 let range = Point::new(start.row, 0)
762 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
763
764 Some(range.to_offset(&self.applied_edits_snapshot))
765 }
766}
767
/// A pair of matching brackets found in a buffer, expressed as offset ranges.
#[derive(Clone, Debug, PartialEq, Eq)]
769pub struct BracketMatch {
770 pub open_range: Range<usize>,
771 pub close_range: Range<usize>,
772 pub newline_only: bool,
773}
774
775impl Buffer {
776 /// Create a new buffer with the given base text.
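///
/// A hedged sketch (not compiled as a doctest), assuming access to a gpui
/// `App` context:
///
/// ```ignore
/// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
/// ```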
777 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
778 Self::build(
779 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
780 None,
781 Capability::ReadWrite,
782 )
783 }
784
/// Create a new buffer with the given base text, which must already have normalized line endings and other normalization applied.
786 pub fn local_normalized(
787 base_text_normalized: Rope,
788 line_ending: LineEnding,
789 cx: &Context<Self>,
790 ) -> Self {
791 Self::build(
792 TextBuffer::new_normalized(
793 0,
794 cx.entity_id().as_non_zero_u64().into(),
795 line_ending,
796 base_text_normalized,
797 ),
798 None,
799 Capability::ReadWrite,
800 )
801 }
802
803 /// Create a new buffer that is a replica of a remote buffer.
804 pub fn remote(
805 remote_id: BufferId,
806 replica_id: ReplicaId,
807 capability: Capability,
808 base_text: impl Into<String>,
809 ) -> Self {
810 Self::build(
811 TextBuffer::new(replica_id, remote_id, base_text.into()),
812 None,
813 capability,
814 )
815 }
816
817 /// Create a new buffer that is a replica of a remote buffer, populating its
818 /// state from the given protobuf message.
819 pub fn from_proto(
820 replica_id: ReplicaId,
821 capability: Capability,
822 message: proto::BufferState,
823 file: Option<Arc<dyn File>>,
824 ) -> Result<Self> {
825 let buffer_id = BufferId::new(message.id)
826 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
827 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
828 let mut this = Self::build(buffer, file, capability);
829 this.text.set_line_ending(proto::deserialize_line_ending(
830 rpc::proto::LineEnding::from_i32(message.line_ending)
831 .ok_or_else(|| anyhow!("missing line_ending"))?,
832 ));
833 this.saved_version = proto::deserialize_version(&message.saved_version);
834 this.saved_mtime = message.saved_mtime.map(|time| time.into());
835 Ok(this)
836 }
837
838 /// Serialize the buffer's state to a protobuf message.
839 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
840 proto::BufferState {
841 id: self.remote_id().into(),
842 file: self.file.as_ref().map(|f| f.to_proto(cx)),
843 base_text: self.base_text().to_string(),
844 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
845 saved_version: proto::serialize_version(&self.saved_version),
846 saved_mtime: self.saved_mtime.map(|time| time.into()),
847 }
848 }
849
850 /// Serialize as protobufs all of the changes to the buffer since the given version.
851 pub fn serialize_ops(
852 &self,
853 since: Option<clock::Global>,
854 cx: &App,
855 ) -> Task<Vec<proto::Operation>> {
856 let mut operations = Vec::new();
857 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
858
859 operations.extend(self.remote_selections.iter().map(|(_, set)| {
860 proto::serialize_operation(&Operation::UpdateSelections {
861 selections: set.selections.clone(),
862 lamport_timestamp: set.lamport_timestamp,
863 line_mode: set.line_mode,
864 cursor_shape: set.cursor_shape,
865 })
866 }));
867
868 for (server_id, diagnostics) in &self.diagnostics {
869 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
870 lamport_timestamp: self.diagnostics_timestamp,
871 server_id: *server_id,
872 diagnostics: diagnostics.iter().cloned().collect(),
873 }));
874 }
875
876 for (server_id, completions) in &self.completion_triggers_per_language_server {
877 operations.push(proto::serialize_operation(
878 &Operation::UpdateCompletionTriggers {
879 triggers: completions.iter().cloned().collect(),
880 lamport_timestamp: self.completion_triggers_timestamp,
881 server_id: *server_id,
882 },
883 ));
884 }
885
886 let text_operations = self.text.operations().clone();
887 cx.background_spawn(async move {
888 let since = since.unwrap_or_default();
889 operations.extend(
890 text_operations
891 .iter()
892 .filter(|(_, op)| !since.observed(op.timestamp()))
893 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
894 );
895 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
896 operations
897 })
898 }
899
900 /// Assign a language to the buffer, returning the buffer.
901 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
902 self.set_language(Some(language), cx);
903 self
904 }
905
906 /// Returns the [`Capability`] of this buffer.
907 pub fn capability(&self) -> Capability {
908 self.capability
909 }
910
911 /// Whether this buffer can only be read.
912 pub fn read_only(&self) -> bool {
913 self.capability == Capability::ReadOnly
914 }
915
916 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
917 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
918 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
919 let snapshot = buffer.snapshot();
920 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
921 Self {
922 saved_mtime,
923 saved_version: buffer.version(),
924 preview_version: buffer.version(),
925 reload_task: None,
926 transaction_depth: 0,
927 was_dirty_before_starting_transaction: None,
928 has_unsaved_edits: Cell::new((buffer.version(), false)),
929 text: buffer,
930 branch_state: None,
931 file,
932 capability,
933 syntax_map,
934 reparse: None,
935 non_text_state_update_count: 0,
936 sync_parse_timeout: Duration::from_millis(1),
937 parse_status: async_watch::channel(ParseStatus::Idle),
938 autoindent_requests: Default::default(),
939 pending_autoindent: Default::default(),
940 language: None,
941 remote_selections: Default::default(),
942 diagnostics: Default::default(),
943 diagnostics_timestamp: Default::default(),
944 completion_triggers: Default::default(),
945 completion_triggers_per_language_server: Default::default(),
946 completion_triggers_timestamp: Default::default(),
947 deferred_ops: OperationQueue::new(),
948 has_conflict: false,
949 change_bits: Default::default(),
950 _subscriptions: Vec::new(),
951 }
952 }
953
954 pub fn build_snapshot(
955 text: Rope,
956 language: Option<Arc<Language>>,
957 language_registry: Option<Arc<LanguageRegistry>>,
958 cx: &mut App,
959 ) -> impl Future<Output = BufferSnapshot> + use<> {
960 let entity_id = cx.reserve_entity::<Self>().entity_id();
961 let buffer_id = entity_id.as_non_zero_u64().into();
962 async move {
963 let text =
964 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
965 let mut syntax = SyntaxMap::new(&text).snapshot();
966 if let Some(language) = language.clone() {
967 let text = text.clone();
968 let language = language.clone();
969 let language_registry = language_registry.clone();
970 syntax.reparse(&text, language_registry, language);
971 }
972 BufferSnapshot {
973 text,
974 syntax,
975 file: None,
976 diagnostics: Default::default(),
977 remote_selections: Default::default(),
978 language,
979 non_text_state_update_count: 0,
980 }
981 }
982 }
983
984 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
985 let entity_id = cx.reserve_entity::<Self>().entity_id();
986 let buffer_id = entity_id.as_non_zero_u64().into();
987 let text =
988 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
989 let syntax = SyntaxMap::new(&text).snapshot();
990 BufferSnapshot {
991 text,
992 syntax,
993 file: None,
994 diagnostics: Default::default(),
995 remote_selections: Default::default(),
996 language: None,
997 non_text_state_update_count: 0,
998 }
999 }
1000
1001 #[cfg(any(test, feature = "test-support"))]
1002 pub fn build_snapshot_sync(
1003 text: Rope,
1004 language: Option<Arc<Language>>,
1005 language_registry: Option<Arc<LanguageRegistry>>,
1006 cx: &mut App,
1007 ) -> BufferSnapshot {
1008 let entity_id = cx.reserve_entity::<Self>().entity_id();
1009 let buffer_id = entity_id.as_non_zero_u64().into();
1010 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1011 let mut syntax = SyntaxMap::new(&text).snapshot();
1012 if let Some(language) = language.clone() {
1013 let text = text.clone();
1014 let language = language.clone();
1015 let language_registry = language_registry.clone();
1016 syntax.reparse(&text, language_registry, language);
1017 }
1018 BufferSnapshot {
1019 text,
1020 syntax,
1021 file: None,
1022 diagnostics: Default::default(),
1023 remote_selections: Default::default(),
1024 language,
1025 non_text_state_update_count: 0,
1026 }
1027 }
1028
1029 /// Retrieve a snapshot of the buffer's current state. This is computationally
1030 /// cheap, and allows reading from the buffer on a background thread.
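///
/// A hedged sketch (not compiled as a doctest); `buffer` is assumed to be an
/// `Entity<Buffer>` and `cx` an `&mut App`:
///
/// ```ignore
/// let snapshot = buffer.read(cx).snapshot();
/// cx.background_spawn(async move {
///     // Read-only analysis can happen off the main thread.
///     let row_count = snapshot.max_point().row + 1;
///     let _ = row_count;
/// })
/// .detach();
/// ```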
1031 pub fn snapshot(&self) -> BufferSnapshot {
1032 let text = self.text.snapshot();
1033 let mut syntax_map = self.syntax_map.lock();
1034 syntax_map.interpolate(&text);
1035 let syntax = syntax_map.snapshot();
1036
1037 BufferSnapshot {
1038 text,
1039 syntax,
1040 file: self.file.clone(),
1041 remote_selections: self.remote_selections.clone(),
1042 diagnostics: self.diagnostics.clone(),
1043 language: self.language.clone(),
1044 non_text_state_update_count: self.non_text_state_update_count,
1045 }
1046 }
1047
1048 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1049 let this = cx.entity();
1050 cx.new(|cx| {
1051 let mut branch = Self {
1052 branch_state: Some(BufferBranchState {
1053 base_buffer: this.clone(),
1054 merged_operations: Default::default(),
1055 }),
1056 language: self.language.clone(),
1057 has_conflict: self.has_conflict,
1058 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1059 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1060 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1061 };
1062 if let Some(language_registry) = self.language_registry() {
1063 branch.set_language_registry(language_registry);
1064 }
1065
1066 // Reparse the branch buffer so that we get syntax highlighting immediately.
1067 branch.reparse(cx);
1068
1069 branch
1070 })
1071 }
1072
1073 pub fn preview_edits(
1074 &self,
1075 edits: Arc<[(Range<Anchor>, String)]>,
1076 cx: &App,
1077 ) -> Task<EditPreview> {
1078 let registry = self.language_registry();
1079 let language = self.language().cloned();
1080 let old_snapshot = self.text.snapshot();
1081 let mut branch_buffer = self.text.branch();
1082 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1083 cx.background_spawn(async move {
1084 if !edits.is_empty() {
1085 if let Some(language) = language.clone() {
1086 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1087 }
1088
1089 branch_buffer.edit(edits.iter().cloned());
1090 let snapshot = branch_buffer.snapshot();
1091 syntax_snapshot.interpolate(&snapshot);
1092
1093 if let Some(language) = language {
1094 syntax_snapshot.reparse(&snapshot, registry, language);
1095 }
1096 }
1097 EditPreview {
1098 old_snapshot,
1099 applied_edits_snapshot: branch_buffer.snapshot(),
1100 syntax_snapshot,
1101 }
1102 })
1103 }
1104
1105 /// Applies all of the changes in this buffer that intersect any of the
1106 /// given `ranges` to its base buffer.
1107 ///
1108 /// If `ranges` is empty, then all changes will be applied. This buffer must
1109 /// be a branch buffer to call this method.
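///
/// A hedged sketch (not compiled as a doctest); `buffer` is assumed to be an
/// `Entity<Buffer>` belonging to the same `App` as `cx`:
///
/// ```ignore
/// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
/// branch.update(cx, |branch, cx| {
///     branch.edit([(0..0, "// new header\n")], None, cx);
///     // An empty range list merges every change back into the base buffer.
///     branch.merge_into_base(Vec::new(), cx);
/// });
/// ```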
1110 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1111 let Some(base_buffer) = self.base_buffer() else {
1112 debug_panic!("not a branch buffer");
1113 return;
1114 };
1115
1116 let mut ranges = if ranges.is_empty() {
1117 &[0..usize::MAX]
1118 } else {
1119 ranges.as_slice()
1120 }
1121 .into_iter()
1122 .peekable();
1123
1124 let mut edits = Vec::new();
1125 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1126 let mut is_included = false;
1127 while let Some(range) = ranges.peek() {
1128 if range.end < edit.new.start {
1129 ranges.next().unwrap();
1130 } else {
1131 if range.start <= edit.new.end {
1132 is_included = true;
1133 }
1134 break;
1135 }
1136 }
1137
1138 if is_included {
1139 edits.push((
1140 edit.old.clone(),
1141 self.text_for_range(edit.new.clone()).collect::<String>(),
1142 ));
1143 }
1144 }
1145
1146 let operation = base_buffer.update(cx, |base_buffer, cx| {
1147 // cx.emit(BufferEvent::DiffBaseChanged);
1148 base_buffer.edit(edits, None, cx)
1149 });
1150
1151 if let Some(operation) = operation {
1152 if let Some(BufferBranchState {
1153 merged_operations, ..
1154 }) = &mut self.branch_state
1155 {
1156 merged_operations.push(operation);
1157 }
1158 }
1159 }
1160
1161 fn on_base_buffer_event(
1162 &mut self,
1163 _: Entity<Buffer>,
1164 event: &BufferEvent,
1165 cx: &mut Context<Self>,
1166 ) {
1167 let BufferEvent::Operation { operation, .. } = event else {
1168 return;
1169 };
1170 let Some(BufferBranchState {
1171 merged_operations, ..
1172 }) = &mut self.branch_state
1173 else {
1174 return;
1175 };
1176
1177 let mut operation_to_undo = None;
1178 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1179 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1180 merged_operations.remove(ix);
1181 operation_to_undo = Some(operation.timestamp);
1182 }
1183 }
1184
1185 self.apply_ops([operation.clone()], cx);
1186
1187 if let Some(timestamp) = operation_to_undo {
1188 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1189 self.undo_operations(counts, cx);
1190 }
1191 }
1192
1193 #[cfg(test)]
1194 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1195 &self.text
1196 }
1197
1198 /// Retrieve a snapshot of the buffer's raw text, without any
1199 /// language-related state like the syntax tree or diagnostics.
1200 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1201 self.text.snapshot()
1202 }
1203
1204 /// The file associated with the buffer, if any.
1205 pub fn file(&self) -> Option<&Arc<dyn File>> {
1206 self.file.as_ref()
1207 }
1208
1209 /// The version of the buffer that was last saved or reloaded from disk.
1210 pub fn saved_version(&self) -> &clock::Global {
1211 &self.saved_version
1212 }
1213
1214 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1215 pub fn saved_mtime(&self) -> Option<MTime> {
1216 self.saved_mtime
1217 }
1218
1219 /// Assign a language to the buffer.
1220 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1221 self.non_text_state_update_count += 1;
1222 self.syntax_map.lock().clear(&self.text);
1223 self.language = language;
1224 self.was_changed();
1225 self.reparse(cx);
1226 cx.emit(BufferEvent::LanguageChanged);
1227 }
1228
1229 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1230 /// other languages if parts of the buffer are written in different languages.
1231 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1232 self.syntax_map
1233 .lock()
1234 .set_language_registry(language_registry);
1235 }
1236
1237 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1238 self.syntax_map.lock().language_registry()
1239 }
1240
1241 /// Assign the buffer a new [`Capability`].
1242 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1243 self.capability = capability;
1244 cx.emit(BufferEvent::CapabilityChanged)
1245 }
1246
1247 /// This method is called to signal that the buffer has been saved.
1248 pub fn did_save(
1249 &mut self,
1250 version: clock::Global,
1251 mtime: Option<MTime>,
1252 cx: &mut Context<Self>,
1253 ) {
1254 self.saved_version = version;
1255 self.has_unsaved_edits
1256 .set((self.saved_version().clone(), false));
1257 self.has_conflict = false;
1258 self.saved_mtime = mtime;
1259 self.was_changed();
1260 cx.emit(BufferEvent::Saved);
1261 cx.notify();
1262 }
1263
1264 /// This method is called to signal that the buffer has been discarded.
1265 pub fn discarded(&self, cx: &mut Context<Self>) {
1266 cx.emit(BufferEvent::Discarded);
1267 cx.notify();
1268 }
1269
1270 /// Reloads the contents of the buffer from disk.
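///
/// A hedged sketch (not compiled as a doctest), from an async task holding an
/// `AsyncApp` handle to the buffer entity:
///
/// ```ignore
/// let rx = buffer.update(cx, |buffer, cx| buffer.reload(cx))?;
/// let _transaction: Option<Transaction> = rx.await.ok().flatten();
/// ```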
1271 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1272 let (tx, rx) = futures::channel::oneshot::channel();
1273 let prev_version = self.text.version();
1274 self.reload_task = Some(cx.spawn(async move |this, cx| {
1275 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1276 let file = this.file.as_ref()?.as_local()?;
1277
1278 Some((file.disk_state().mtime(), file.load(cx)))
1279 })?
1280 else {
1281 return Ok(());
1282 };
1283
1284 let new_text = new_text.await?;
1285 let diff = this
1286 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1287 .await;
1288 this.update(cx, |this, cx| {
1289 if this.version() == diff.base_version {
1290 this.finalize_last_transaction();
1291 this.apply_diff(diff, cx);
1292 tx.send(this.finalize_last_transaction().cloned()).ok();
1293 this.has_conflict = false;
1294 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1295 } else {
1296 if !diff.edits.is_empty()
1297 || this
1298 .edits_since::<usize>(&diff.base_version)
1299 .next()
1300 .is_some()
1301 {
1302 this.has_conflict = true;
1303 }
1304
1305 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1306 }
1307
1308 this.reload_task.take();
1309 })
1310 }));
1311 rx
1312 }
1313
1314 /// This method is called to signal that the buffer has been reloaded.
1315 pub fn did_reload(
1316 &mut self,
1317 version: clock::Global,
1318 line_ending: LineEnding,
1319 mtime: Option<MTime>,
1320 cx: &mut Context<Self>,
1321 ) {
1322 self.saved_version = version;
1323 self.has_unsaved_edits
1324 .set((self.saved_version.clone(), false));
1325 self.text.set_line_ending(line_ending);
1326 self.saved_mtime = mtime;
1327 cx.emit(BufferEvent::Reloaded);
1328 cx.notify();
1329 }
1330
1331 /// Updates the [`File`] backing this buffer. This should be called when
1332 /// the file has changed or has been deleted.
1333 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1334 let was_dirty = self.is_dirty();
1335 let mut file_changed = false;
1336
1337 if let Some(old_file) = self.file.as_ref() {
1338 if new_file.path() != old_file.path() {
1339 file_changed = true;
1340 }
1341
1342 let old_state = old_file.disk_state();
1343 let new_state = new_file.disk_state();
1344 if old_state != new_state {
1345 file_changed = true;
1346 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1347 cx.emit(BufferEvent::ReloadNeeded)
1348 }
1349 }
1350 } else {
1351 file_changed = true;
1352 };
1353
1354 self.file = Some(new_file);
1355 if file_changed {
1356 self.was_changed();
1357 self.non_text_state_update_count += 1;
1358 if was_dirty != self.is_dirty() {
1359 cx.emit(BufferEvent::DirtyChanged);
1360 }
1361 cx.emit(BufferEvent::FileHandleChanged);
1362 cx.notify();
1363 }
1364 }
1365
1366 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1367 Some(self.branch_state.as_ref()?.base_buffer.clone())
1368 }
1369
1370 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1371 pub fn language(&self) -> Option<&Arc<Language>> {
1372 self.language.as_ref()
1373 }
1374
1375 /// Returns the [`Language`] at the given location.
1376 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1377 let offset = position.to_offset(self);
1378 self.syntax_map
1379 .lock()
1380 .layers_for_range(offset..offset, &self.text, false)
1381 .last()
1382 .map(|info| info.language.clone())
1383 .or_else(|| self.language.clone())
1384 }
1385
1386 /// Returns each [`Language`] for the active syntax layers at the given location.
1387 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1388 let offset = position.to_offset(self);
1389 let mut languages: Vec<Arc<Language>> = self
1390 .syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .map(|info| info.language.clone())
1394 .collect();
1395
1396 if languages.is_empty() {
1397 if let Some(buffer_language) = self.language() {
1398 languages.push(buffer_language.clone());
1399 }
1400 }
1401
1402 languages
1403 }
1404
1405 /// An integer version number that accounts for all updates besides
1406 /// the buffer's text itself (which is versioned via a version vector).
1407 pub fn non_text_state_update_count(&self) -> usize {
1408 self.non_text_state_update_count
1409 }
1410
1411 /// Whether the buffer is being parsed in the background.
1412 #[cfg(any(test, feature = "test-support"))]
1413 pub fn is_parsing(&self) -> bool {
1414 self.reparse.is_some()
1415 }
1416
1417 /// Indicates whether the buffer contains any regions that may be
1418 /// written in a language that hasn't been loaded yet.
1419 pub fn contains_unknown_injections(&self) -> bool {
1420 self.syntax_map.lock().contains_unknown_injections()
1421 }
1422
1423 #[cfg(test)]
1424 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1425 self.sync_parse_timeout = timeout;
1426 }
1427
1428 /// Called after an edit to synchronize the buffer's main parse tree with
1429 /// the buffer's new underlying state.
1430 ///
1431 /// Locks the syntax map and interpolates the edits since the last reparse
1432 /// into the foreground syntax tree.
1433 ///
1434 /// Then takes a stable snapshot of the syntax map before unlocking it.
1435 /// The snapshot with the interpolated edits is sent to a background thread,
1436 /// where we ask Tree-sitter to perform an incremental parse.
1437 ///
/// Meanwhile, in the foreground, we block the main thread for up to 1ms
/// waiting for the parse to complete. If it finishes within that window,
/// we proceed synchronously with the newly parsed tree.
///
/// If we time out waiting on the parse, we return immediately with the
/// interpolated tree and spawn a second task that waits for the background
/// parse to finish. When it does, we call back into the main thread and
/// assign the newly parsed syntax state.
1446 ///
1447 /// If the buffer or grammar changed since the start of the background parse,
1448 /// initiate an additional reparse recursively. To avoid concurrent parses
1449 /// for the same buffer, we only initiate a new parse if we are not already
1450 /// parsing in the background.
1451 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1452 if self.reparse.is_some() {
1453 return;
1454 }
let Some(language) = self.language.clone() else {
return;
};
1460
1461 let text = self.text_snapshot();
1462 let parsed_version = self.version();
1463
1464 let mut syntax_map = self.syntax_map.lock();
1465 syntax_map.interpolate(&text);
1466 let language_registry = syntax_map.language_registry();
1467 let mut syntax_snapshot = syntax_map.snapshot();
1468 drop(syntax_map);
1469
1470 let parse_task = cx.background_spawn({
1471 let language = language.clone();
1472 let language_registry = language_registry.clone();
1473 async move {
1474 syntax_snapshot.reparse(&text, language_registry, language);
1475 syntax_snapshot
1476 }
1477 });
1478
1479 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1480 match cx
1481 .background_executor()
1482 .block_with_timeout(self.sync_parse_timeout, parse_task)
1483 {
1484 Ok(new_syntax_snapshot) => {
1485 self.did_finish_parsing(new_syntax_snapshot, cx);
1486 self.reparse = None;
1487 }
1488 Err(parse_task) => {
1489 self.reparse = Some(cx.spawn(async move |this, cx| {
1490 let new_syntax_map = parse_task.await;
1491 this.update(cx, move |this, cx| {
1492 let grammar_changed =
1493 this.language.as_ref().map_or(true, |current_language| {
1494 !Arc::ptr_eq(&language, current_language)
1495 });
1496 let language_registry_changed = new_syntax_map
1497 .contains_unknown_injections()
1498 && language_registry.map_or(false, |registry| {
1499 registry.version() != new_syntax_map.language_registry_version()
1500 });
1501 let parse_again = language_registry_changed
1502 || grammar_changed
1503 || this.version.changed_since(&parsed_version);
1504 this.did_finish_parsing(new_syntax_map, cx);
1505 this.reparse = None;
1506 if parse_again {
1507 this.reparse(cx);
1508 }
1509 })
1510 .ok();
1511 }));
1512 }
1513 }
1514 }
1515
1516 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1517 self.was_changed();
1518 self.non_text_state_update_count += 1;
1519 self.syntax_map.lock().did_parse(syntax_snapshot);
1520 self.request_autoindent(cx);
1521 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1522 cx.emit(BufferEvent::Reparsed);
1523 cx.notify();
1524 }
1525
1526 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1527 self.parse_status.1.clone()
1528 }
1529
1530 /// Assign to the buffer a set of diagnostics created by a given language server.
1531 pub fn update_diagnostics(
1532 &mut self,
1533 server_id: LanguageServerId,
1534 diagnostics: DiagnosticSet,
1535 cx: &mut Context<Self>,
1536 ) {
1537 let lamport_timestamp = self.text.lamport_clock.tick();
1538 let op = Operation::UpdateDiagnostics {
1539 server_id,
1540 diagnostics: diagnostics.iter().cloned().collect(),
1541 lamport_timestamp,
1542 };
1543 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1544 self.send_operation(op, true, cx);
1545 }
1546
1547 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1548 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1549 return None;
1550 };
1551 Some(&self.diagnostics[idx].1)
1552 }
1553
1554 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1555 if let Some(indent_sizes) = self.compute_autoindents() {
1556 let indent_sizes = cx.background_spawn(indent_sizes);
1557 match cx
1558 .background_executor()
1559 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1560 {
1561 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1562 Err(indent_sizes) => {
1563 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1564 let indent_sizes = indent_sizes.await;
1565 this.update(cx, |this, cx| {
1566 this.apply_autoindents(indent_sizes, cx);
1567 })
1568 .ok();
1569 }));
1570 }
1571 }
1572 } else {
1573 self.autoindent_requests.clear();
1574 }
1575 }
1576
1577 fn compute_autoindents(
1578 &self,
1579 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
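///
/// A minimal sketch of the expected result (illustrative values, not a doc test):
///
/// ```ignore
/// // Growing a 2-space indent to 4 spaces on row 3 inserts two spaces at column 0.
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     3,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
/// );
/// ```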
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
1780 match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
1828 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
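///
/// A sketch with hypothetical contents (`buffer` and `cx` come from the surrounding context):
///
/// ```ignore
/// // Assuming the buffer currently contains "fn main() {}\n\n  ":
/// buffer.ensure_final_newline(cx);
/// assert_eq!(buffer.text(), "fn main() {}\n");
/// ```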
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
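///
/// A sketch of the intended flow, assuming `buffer` is an `Entity<Buffer>` and the
/// awaiting of the background task is elided:
///
/// ```ignore
/// let diff_task = buffer.read(cx).diff(String::from("fn main() {}\n"), cx);
/// // ...await `diff_task` on a background task, yielding `diff`...
/// buffer.update(cx, |buffer, cx| {
///     // Hunks that conflict with edits made since the diff was computed are
///     // dropped; the rest are applied in a single transaction.
///     buffer.apply_diff(diff, cx);
/// });
/// ```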
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 let snapshot = self.snapshot();
1873 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1874 let mut delta = 0;
1875 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1876 while let Some(edit_since) = edits_since.peek() {
1877 // If the edit occurs after a diff hunk, then it does not
1878 // affect that hunk.
1879 if edit_since.old.start > range.end {
1880 break;
1881 }
1882 // If the edit precedes the diff hunk, then adjust the hunk
1883 // to reflect the edit.
1884 else if edit_since.old.end < range.start {
1885 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1886 edits_since.next();
1887 }
1888 // If the edit intersects a diff hunk, then discard that hunk.
1889 else {
1890 return None;
1891 }
1892 }
1893
1894 let start = (range.start as i64 + delta) as usize;
1895 let end = (range.end as i64 + delta) as usize;
1896 Some((start..end, new_text))
1897 });
1898
1899 self.start_transaction();
1900 self.text.set_line_ending(diff.line_ending);
1901 self.edit(adjusted_edits, None, cx);
1902 self.end_transaction(cx)
1903 }
1904
1905 fn has_unsaved_edits(&self) -> bool {
1906 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1907
1908 if last_version == self.version {
1909 self.has_unsaved_edits
1910 .set((last_version, has_unsaved_edits));
1911 return has_unsaved_edits;
1912 }
1913
1914 let has_edits = self.has_edits_since(&self.saved_version);
1915 self.has_unsaved_edits
1916 .set((self.version.clone(), has_edits));
1917 has_edits
1918 }
1919
1920 /// Checks if the buffer has unsaved changes.
1921 pub fn is_dirty(&self) -> bool {
1922 if self.capability == Capability::ReadOnly {
1923 return false;
1924 }
1925 if self.has_conflict {
1926 return true;
1927 }
1928 match self.file.as_ref().map(|f| f.disk_state()) {
1929 Some(DiskState::New) | Some(DiskState::Deleted) => {
1930 !self.is_empty() && self.has_unsaved_edits()
1931 }
1932 _ => self.has_unsaved_edits(),
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => false,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1963 ///
1964 /// This allows downstream code to check if the buffer's text has changed without
1965 /// waiting for an effect cycle, which would be required if using events.
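///
/// A minimal sketch (the `Rc`/`Cell` setup belongs to the caller, not to this API):
///
/// ```ignore
/// use std::{cell::Cell, rc::Rc};
///
/// let edited = Rc::new(Cell::new(false));
/// buffer.record_changes(Rc::downgrade(&edited));
/// // ...later, after edits may have happened...
/// if edited.take() {
///     // The buffer's text changed since the bit was last cleared.
/// }
/// ```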
1966 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1967 if let Err(ix) = self
1968 .change_bits
1969 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1970 {
1971 self.change_bits.insert(ix, bit);
1972 }
1973 }
1974
1975 fn was_changed(&mut self) {
1976 self.change_bits.retain(|change_bit| {
1977 change_bit.upgrade().map_or(false, |bit| {
1978 bit.replace(true);
1979 true
1980 })
1981 });
1982 }
1983
1984 /// Starts a transaction, if one is not already in-progress. When undoing or
1985 /// redoing edits, all of the edits performed within a transaction are undone
1986 /// or redone together.
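///
/// A sketch of grouping two edits into a single undo step:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "// header\n")], None, cx);
/// let len = buffer.len();
/// buffer.edit([(len..len, "// footer\n")], None, cx);
/// buffer.end_transaction(cx);
/// // A single undo now reverts both edits.
/// buffer.undo(cx);
/// ```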
1987 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1988 self.start_transaction_at(Instant::now())
1989 }
1990
1991 /// Starts a transaction, providing the current time. Subsequent transactions
1992 /// that occur within a short period of time will be grouped together. This
1993 /// is controlled by the buffer's undo grouping duration.
1994 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1995 self.transaction_depth += 1;
1996 if self.was_dirty_before_starting_transaction.is_none() {
1997 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1998 }
1999 self.text.start_transaction_at(now)
2000 }
2001
2002 /// Terminates the current transaction, if this is the outermost transaction.
2003 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2004 self.end_transaction_at(Instant::now(), cx)
2005 }
2006
2007 /// Terminates the current transaction, providing the current time. Subsequent transactions
2008 /// that occur within a short period of time will be grouped together. This
2009 /// is controlled by the buffer's undo grouping duration.
2010 pub fn end_transaction_at(
2011 &mut self,
2012 now: Instant,
2013 cx: &mut Context<Self>,
2014 ) -> Option<TransactionId> {
2015 assert!(self.transaction_depth > 0);
2016 self.transaction_depth -= 1;
2017 let was_dirty = if self.transaction_depth == 0 {
2018 self.was_dirty_before_starting_transaction.take().unwrap()
2019 } else {
2020 false
2021 };
2022 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2023 self.did_edit(&start_version, was_dirty, cx);
2024 Some(transaction_id)
2025 } else {
2026 None
2027 }
2028 }
2029
2030 /// Manually add a transaction to the buffer's undo history.
2031 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2032 self.text.push_transaction(transaction, now);
2033 }
2034
2035 /// Prevent the last transaction from being grouped with any subsequent transactions,
2036 /// even if they occur within the buffer's undo grouping duration.
2037 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2038 self.text.finalize_last_transaction()
2039 }
2040
2041 /// Manually group all changes since a given transaction.
2042 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2043 self.text.group_until_transaction(transaction_id);
2044 }
2045
2046 /// Manually remove a transaction from the buffer's undo history
2047 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2048 self.text.forget_transaction(transaction_id)
2049 }
2050
2051 /// Retrieve a transaction from the buffer's undo history
2052 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2053 self.text.get_transaction(transaction_id)
2054 }
2055
2056 /// Manually merge two transactions in the buffer's undo history.
2057 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2058 self.text.merge_transactions(transaction, destination);
2059 }
2060
2061 /// Waits for the buffer to receive operations with the given timestamps.
2062 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2063 &mut self,
2064 edit_ids: It,
2065 ) -> impl Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_edits(edit_ids)
2067 }
2068
2069 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2070 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2071 &mut self,
2072 anchors: It,
2073 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2074 self.text.wait_for_anchors(anchors)
2075 }
2076
2077 /// Waits for the buffer to receive operations up to the given version.
2078 pub fn wait_for_version(
2079 &mut self,
2080 version: clock::Global,
2081 ) -> impl Future<Output = Result<()>> + use<> {
2082 self.text.wait_for_version(version)
2083 }
2084
2085 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2086 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2087 pub fn give_up_waiting(&mut self) {
2088 self.text.give_up_waiting();
2089 }
2090
2091 /// Stores a set of selections that should be broadcast to all of the buffer's replicas.
2092 pub fn set_active_selections(
2093 &mut self,
2094 selections: Arc<[Selection<Anchor>]>,
2095 line_mode: bool,
2096 cursor_shape: CursorShape,
2097 cx: &mut Context<Self>,
2098 ) {
2099 let lamport_timestamp = self.text.lamport_clock.tick();
2100 self.remote_selections.insert(
2101 self.text.replica_id(),
2102 SelectionSet {
2103 selections: selections.clone(),
2104 lamport_timestamp,
2105 line_mode,
2106 cursor_shape,
2107 },
2108 );
2109 self.send_operation(
2110 Operation::UpdateSelections {
2111 selections,
2112 line_mode,
2113 lamport_timestamp,
2114 cursor_shape,
2115 },
2116 true,
2117 cx,
2118 );
2119 self.non_text_state_update_count += 1;
2120 cx.notify();
2121 }
2122
2123 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2124 /// this replica.
2125 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2126 if self
2127 .remote_selections
2128 .get(&self.text.replica_id())
2129 .map_or(true, |set| !set.selections.is_empty())
2130 {
2131 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2132 }
2133 }
2134
2135 pub fn set_agent_selections(
2136 &mut self,
2137 selections: Arc<[Selection<Anchor>]>,
2138 line_mode: bool,
2139 cursor_shape: CursorShape,
2140 cx: &mut Context<Self>,
2141 ) {
2142 let lamport_timestamp = self.text.lamport_clock.tick();
2143 self.remote_selections.insert(
2144 AGENT_REPLICA_ID,
2145 SelectionSet {
2146 selections: selections.clone(),
2147 lamport_timestamp,
2148 line_mode,
2149 cursor_shape,
2150 },
2151 );
2152 self.non_text_state_update_count += 1;
2153 cx.notify();
2154 }
2155
2156 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2157 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2158 }
2159
2160 /// Replaces the buffer's entire text.
2161 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2162 where
2163 T: Into<Arc<str>>,
2164 {
2165 self.autoindent_requests.clear();
2166 self.edit([(0..self.len(), text)], None, cx)
2167 }
2168
2169 /// Appends the given text to the end of the buffer.
2170 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2171 where
2172 T: Into<Arc<str>>,
2173 {
2174 self.edit([(self.len()..self.len(), text)], None, cx)
2175 }
2176
2177 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2178 /// delete, and a string of text to insert at that location.
2179 ///
2180 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2181 /// request for the edited ranges, which will be processed when the buffer finishes
2182 /// parsing.
2183 ///
2184 /// Parsing takes place at the end of a transaction, and may happen synchronously
2185 /// or asynchronously, depending on the changes.
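///
/// A sketch of a ranged edit with auto-indentation (offsets are illustrative; any
/// `ToOffset` type such as `usize`, `Point`, or `Anchor` works for the ranges):
///
/// ```ignore
/// buffer.edit(
///     [(10..14, "if condition {\n    body();\n}")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```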
2186 pub fn edit<I, S, T>(
2187 &mut self,
2188 edits_iter: I,
2189 autoindent_mode: Option<AutoindentMode>,
2190 cx: &mut Context<Self>,
2191 ) -> Option<clock::Lamport>
2192 where
2193 I: IntoIterator<Item = (Range<S>, T)>,
2194 S: ToOffset,
2195 T: Into<Arc<str>>,
2196 {
2197 // Skip invalid edits and coalesce contiguous ones.
2198 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2199
2200 for (range, new_text) in edits_iter {
2201 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2202
2203 if range.start > range.end {
2204 mem::swap(&mut range.start, &mut range.end);
2205 }
2206 let new_text = new_text.into();
2207 if !new_text.is_empty() || !range.is_empty() {
2208 if let Some((prev_range, prev_text)) = edits.last_mut() {
2209 if prev_range.end >= range.start {
2210 prev_range.end = cmp::max(prev_range.end, range.end);
2211 *prev_text = format!("{prev_text}{new_text}").into();
2212 } else {
2213 edits.push((range, new_text));
2214 }
2215 } else {
2216 edits.push((range, new_text));
2217 }
2218 }
2219 }
2220 if edits.is_empty() {
2221 return None;
2222 }
2223
2224 self.start_transaction();
2225 self.pending_autoindent.take();
2226 let autoindent_request = autoindent_mode
2227 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2228
2229 let edit_operation = self.text.edit(edits.iter().cloned());
2230 let edit_id = edit_operation.timestamp();
2231
2232 if let Some((before_edit, mode)) = autoindent_request {
2233 let mut delta = 0isize;
2234 let entries = edits
2235 .into_iter()
2236 .enumerate()
2237 .zip(&edit_operation.as_edit().unwrap().new_text)
2238 .map(|((ix, (range, _)), new_text)| {
2239 let new_text_length = new_text.len();
2240 let old_start = range.start.to_point(&before_edit);
2241 let new_start = (delta + range.start as isize) as usize;
2242 let range_len = range.end - range.start;
2243 delta += new_text_length as isize - range_len as isize;
2244
2245 // Decide what range of the insertion to auto-indent, and whether
2246 // the first line of the insertion should be considered a newly-inserted line
2247 // or an edit to an existing line.
2248 let mut range_of_insertion_to_indent = 0..new_text_length;
2249 let mut first_line_is_new = true;
2250
2251 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2252 let old_line_end = before_edit.line_len(old_start.row);
2253
2254 if old_start.column > old_line_start {
2255 first_line_is_new = false;
2256 }
2257
2258 if !new_text.contains('\n')
2259 && (old_start.column + (range_len as u32) < old_line_end
2260 || old_line_end == old_line_start)
2261 {
2262 first_line_is_new = false;
2263 }
2264
2265 // When inserting text starting with a newline, avoid auto-indenting the
2266 // previous line.
2267 if new_text.starts_with('\n') {
2268 range_of_insertion_to_indent.start += 1;
2269 first_line_is_new = true;
2270 }
2271
2272 let mut original_indent_column = None;
2273 if let AutoindentMode::Block {
2274 original_indent_columns,
2275 } = &mode
2276 {
2277 original_indent_column = Some(if new_text.starts_with('\n') {
2278 indent_size_for_text(
2279 new_text[range_of_insertion_to_indent.clone()].chars(),
2280 )
2281 .len
2282 } else {
2283 original_indent_columns
2284 .get(ix)
2285 .copied()
2286 .flatten()
2287 .unwrap_or_else(|| {
2288 indent_size_for_text(
2289 new_text[range_of_insertion_to_indent.clone()].chars(),
2290 )
2291 .len
2292 })
2293 });
2294
2295 // Avoid auto-indenting the line after the edit.
2296 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2297 range_of_insertion_to_indent.end -= 1;
2298 }
2299 }
2300
2301 AutoindentRequestEntry {
2302 first_line_is_new,
2303 original_indent_column,
2304 indent_size: before_edit.language_indent_size_at(range.start, cx),
2305 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2306 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2307 }
2308 })
2309 .collect();
2310
2311 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2312 before_edit,
2313 entries,
2314 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2315 ignore_empty_lines: false,
2316 }));
2317 }
2318
2319 self.end_transaction(cx);
2320 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2321 Some(edit_id)
2322 }
2323
2324 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2325 self.was_changed();
2326
2327 if self.edits_since::<usize>(old_version).next().is_none() {
2328 return;
2329 }
2330
2331 self.reparse(cx);
2332 cx.emit(BufferEvent::Edited);
2333 if was_dirty != self.is_dirty() {
2334 cx.emit(BufferEvent::DirtyChanged);
2335 }
2336 cx.notify();
2337 }
2338
2339 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2340 where
2341 I: IntoIterator<Item = Range<T>>,
2342 T: ToOffset + Copy,
2343 {
2344 let before_edit = self.snapshot();
2345 let entries = ranges
2346 .into_iter()
2347 .map(|range| AutoindentRequestEntry {
2348 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2349 first_line_is_new: true,
2350 indent_size: before_edit.language_indent_size_at(range.start, cx),
2351 original_indent_column: None,
2352 })
2353 .collect();
2354 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2355 before_edit,
2356 entries,
2357 is_block_mode: false,
2358 ignore_empty_lines: true,
2359 }));
2360 self.request_autoindent(cx);
2361 }
2362
2363 /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2364 /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2365 pub fn insert_empty_line(
2366 &mut self,
2367 position: impl ToPoint,
2368 space_above: bool,
2369 space_below: bool,
2370 cx: &mut Context<Self>,
2371 ) -> Point {
2372 let mut position = position.to_point(self);
2373
2374 self.start_transaction();
2375
2376 self.edit(
2377 [(position..position, "\n")],
2378 Some(AutoindentMode::EachLine),
2379 cx,
2380 );
2381
2382 if position.column > 0 {
2383 position += Point::new(1, 0);
2384 }
2385
2386 if !self.is_line_blank(position.row) {
2387 self.edit(
2388 [(position..position, "\n")],
2389 Some(AutoindentMode::EachLine),
2390 cx,
2391 );
2392 }
2393
2394 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2395 self.edit(
2396 [(position..position, "\n")],
2397 Some(AutoindentMode::EachLine),
2398 cx,
2399 );
2400 position.row += 1;
2401 }
2402
2403 if space_below
2404 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2405 {
2406 self.edit(
2407 [(position..position, "\n")],
2408 Some(AutoindentMode::EachLine),
2409 cx,
2410 );
2411 }
2412
2413 self.end_transaction(cx);
2414
2415 position
2416 }
2417
2418 /// Applies the given remote operations to the buffer.
2419 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2420 self.pending_autoindent.take();
2421 let was_dirty = self.is_dirty();
2422 let old_version = self.version.clone();
2423 let mut deferred_ops = Vec::new();
2424 let buffer_ops = ops
2425 .into_iter()
2426 .filter_map(|op| match op {
2427 Operation::Buffer(op) => Some(op),
2428 _ => {
2429 if self.can_apply_op(&op) {
2430 self.apply_op(op, cx);
2431 } else {
2432 deferred_ops.push(op);
2433 }
2434 None
2435 }
2436 })
2437 .collect::<Vec<_>>();
2438 for operation in buffer_ops.iter() {
2439 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2440 }
2441 self.text.apply_ops(buffer_ops);
2442 self.deferred_ops.insert(deferred_ops);
2443 self.flush_deferred_ops(cx);
2444 self.did_edit(&old_version, was_dirty, cx);
2445 // Notify regardless of whether the buffer was edited, since the operations could include a
2446 // selection update.
2447 cx.notify();
2448 }
2449
2450 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2451 let mut deferred_ops = Vec::new();
2452 for op in self.deferred_ops.drain().iter().cloned() {
2453 if self.can_apply_op(&op) {
2454 self.apply_op(op, cx);
2455 } else {
2456 deferred_ops.push(op);
2457 }
2458 }
2459 self.deferred_ops.insert(deferred_ops);
2460 }
2461
2462 pub fn has_deferred_ops(&self) -> bool {
2463 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2464 }
2465
2466 fn can_apply_op(&self, operation: &Operation) -> bool {
2467 match operation {
2468 Operation::Buffer(_) => {
2469 unreachable!("buffer operations should never be applied at this layer")
2470 }
2471 Operation::UpdateDiagnostics {
2472 diagnostics: diagnostic_set,
2473 ..
2474 } => diagnostic_set.iter().all(|diagnostic| {
2475 self.text.can_resolve(&diagnostic.range.start)
2476 && self.text.can_resolve(&diagnostic.range.end)
2477 }),
2478 Operation::UpdateSelections { selections, .. } => selections
2479 .iter()
2480 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2481 Operation::UpdateCompletionTriggers { .. } => true,
2482 }
2483 }
2484
2485 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2486 match operation {
2487 Operation::Buffer(_) => {
2488 unreachable!("buffer operations should never be applied at this layer")
2489 }
2490 Operation::UpdateDiagnostics {
2491 server_id,
2492 diagnostics: diagnostic_set,
2493 lamport_timestamp,
2494 } => {
2495 let snapshot = self.snapshot();
2496 self.apply_diagnostic_update(
2497 server_id,
2498 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2499 lamport_timestamp,
2500 cx,
2501 );
2502 }
2503 Operation::UpdateSelections {
2504 selections,
2505 lamport_timestamp,
2506 line_mode,
2507 cursor_shape,
2508 } => {
2509 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2510 if set.lamport_timestamp > lamport_timestamp {
2511 return;
2512 }
2513 }
2514
2515 self.remote_selections.insert(
2516 lamport_timestamp.replica_id,
2517 SelectionSet {
2518 selections,
2519 lamport_timestamp,
2520 line_mode,
2521 cursor_shape,
2522 },
2523 );
2524 self.text.lamport_clock.observe(lamport_timestamp);
2525 self.non_text_state_update_count += 1;
2526 }
2527 Operation::UpdateCompletionTriggers {
2528 triggers,
2529 lamport_timestamp,
2530 server_id,
2531 } => {
2532 if triggers.is_empty() {
2533 self.completion_triggers_per_language_server
2534 .remove(&server_id);
2535 self.completion_triggers = self
2536 .completion_triggers_per_language_server
2537 .values()
2538 .flat_map(|triggers| triggers.into_iter().cloned())
2539 .collect();
2540 } else {
2541 self.completion_triggers_per_language_server
2542 .insert(server_id, triggers.iter().cloned().collect());
2543 self.completion_triggers.extend(triggers);
2544 }
2545 self.text.lamport_clock.observe(lamport_timestamp);
2546 }
2547 }
2548 }
2549
2550 fn apply_diagnostic_update(
2551 &mut self,
2552 server_id: LanguageServerId,
2553 diagnostics: DiagnosticSet,
2554 lamport_timestamp: clock::Lamport,
2555 cx: &mut Context<Self>,
2556 ) {
2557 if lamport_timestamp > self.diagnostics_timestamp {
2558 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2559 if diagnostics.is_empty() {
2560 if let Ok(ix) = ix {
2561 self.diagnostics.remove(ix);
2562 }
2563 } else {
2564 match ix {
2565 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2566 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2567 };
2568 }
2569 self.diagnostics_timestamp = lamport_timestamp;
2570 self.non_text_state_update_count += 1;
2571 self.text.lamport_clock.observe(lamport_timestamp);
2572 cx.notify();
2573 cx.emit(BufferEvent::DiagnosticsUpdated);
2574 }
2575 }
2576
2577 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2578 self.was_changed();
2579 cx.emit(BufferEvent::Operation {
2580 operation,
2581 is_local,
2582 });
2583 }
2584
2585 /// Removes the selections for a given peer.
2586 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2587 self.remote_selections.remove(&replica_id);
2588 cx.notify();
2589 }
2590
2591 /// Undoes the most recent transaction.
2592 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2593 let was_dirty = self.is_dirty();
2594 let old_version = self.version.clone();
2595
2596 if let Some((transaction_id, operation)) = self.text.undo() {
2597 self.send_operation(Operation::Buffer(operation), true, cx);
2598 self.did_edit(&old_version, was_dirty, cx);
2599 Some(transaction_id)
2600 } else {
2601 None
2602 }
2603 }
2604
2605 /// Manually undoes a specific transaction in the buffer's undo history.
2606 pub fn undo_transaction(
2607 &mut self,
2608 transaction_id: TransactionId,
2609 cx: &mut Context<Self>,
2610 ) -> bool {
2611 let was_dirty = self.is_dirty();
2612 let old_version = self.version.clone();
2613 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2614 self.send_operation(Operation::Buffer(operation), true, cx);
2615 self.did_edit(&old_version, was_dirty, cx);
2616 true
2617 } else {
2618 false
2619 }
2620 }
2621
2622 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2623 pub fn undo_to_transaction(
2624 &mut self,
2625 transaction_id: TransactionId,
2626 cx: &mut Context<Self>,
2627 ) -> bool {
2628 let was_dirty = self.is_dirty();
2629 let old_version = self.version.clone();
2630
2631 let operations = self.text.undo_to_transaction(transaction_id);
2632 let undone = !operations.is_empty();
2633 for operation in operations {
2634 self.send_operation(Operation::Buffer(operation), true, cx);
2635 }
2636 if undone {
2637 self.did_edit(&old_version, was_dirty, cx)
2638 }
2639 undone
2640 }
2641
2642 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2643 let was_dirty = self.is_dirty();
2644 let operation = self.text.undo_operations(counts);
2645 let old_version = self.version.clone();
2646 self.send_operation(Operation::Buffer(operation), true, cx);
2647 self.did_edit(&old_version, was_dirty, cx);
2648 }
2649
2650 /// Redoes the most recent transaction.
2651 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2652 let was_dirty = self.is_dirty();
2653 let old_version = self.version.clone();
2654
2655 if let Some((transaction_id, operation)) = self.text.redo() {
2656 self.send_operation(Operation::Buffer(operation), true, cx);
2657 self.did_edit(&old_version, was_dirty, cx);
2658 Some(transaction_id)
2659 } else {
2660 None
2661 }
2662 }
2663
2664 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2665 pub fn redo_to_transaction(
2666 &mut self,
2667 transaction_id: TransactionId,
2668 cx: &mut Context<Self>,
2669 ) -> bool {
2670 let was_dirty = self.is_dirty();
2671 let old_version = self.version.clone();
2672
2673 let operations = self.text.redo_to_transaction(transaction_id);
2674 let redone = !operations.is_empty();
2675 for operation in operations {
2676 self.send_operation(Operation::Buffer(operation), true, cx);
2677 }
2678 if redone {
2679 self.did_edit(&old_version, was_dirty, cx)
2680 }
2681 redone
2682 }
2683
2684 /// Overrides the current completion triggers with the user-provided completion triggers.
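///
/// A sketch (the `server_id` value is assumed to come from the caller):
///
/// ```ignore
/// buffer.set_completion_triggers(
///     server_id,
///     BTreeSet::from([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// ```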
2685 pub fn set_completion_triggers(
2686 &mut self,
2687 server_id: LanguageServerId,
2688 triggers: BTreeSet<String>,
2689 cx: &mut Context<Self>,
2690 ) {
2691 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2692 if triggers.is_empty() {
2693 self.completion_triggers_per_language_server
2694 .remove(&server_id);
2695 self.completion_triggers = self
2696 .completion_triggers_per_language_server
2697 .values()
2698 .flat_map(|triggers| triggers.into_iter().cloned())
2699 .collect();
2700 } else {
2701 self.completion_triggers_per_language_server
2702 .insert(server_id, triggers.clone());
2703 self.completion_triggers.extend(triggers.iter().cloned());
2704 }
2705 self.send_operation(
2706 Operation::UpdateCompletionTriggers {
2707 triggers: triggers.iter().cloned().collect(),
2708 lamport_timestamp: self.completion_triggers_timestamp,
2709 server_id,
2710 },
2711 true,
2712 cx,
2713 );
2714 cx.notify();
2715 }
2716
2717 /// Returns the set of strings that trigger a completion menu for this language.
2718 /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2719 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2720 &self.completion_triggers
2721 }
2722
2723 /// Call this directly after performing edits to prevent the preview tab
2724 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2725 /// to return false until there are additional edits.
2726 pub fn refresh_preview(&mut self) {
2727 self.preview_version = self.version.clone();
2728 }
2729
2730 /// Whether we should preserve the preview status of a tab containing this buffer.
2731 pub fn preserve_preview(&self) -> bool {
2732 !self.has_edits_since(&self.preview_version)
2733 }
2734}
2735
2736#[doc(hidden)]
2737#[cfg(any(test, feature = "test-support"))]
2738impl Buffer {
2739 pub fn edit_via_marked_text(
2740 &mut self,
2741 marked_string: &str,
2742 autoindent_mode: Option<AutoindentMode>,
2743 cx: &mut Context<Self>,
2744 ) {
2745 let edits = self.edits_for_marked_text(marked_string);
2746 self.edit(edits, autoindent_mode, cx);
2747 }
2748
2749 pub fn set_group_interval(&mut self, group_interval: Duration) {
2750 self.text.set_group_interval(group_interval);
2751 }
2752
2753 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2754 where
2755 T: rand::Rng,
2756 {
2757 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2758 let mut last_end = None;
2759 for _ in 0..old_range_count {
2760 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2761 break;
2762 }
2763
2764 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2765 let mut range = self.random_byte_range(new_start, rng);
2766 if rng.gen_bool(0.2) {
2767 mem::swap(&mut range.start, &mut range.end);
2768 }
2769 last_end = Some(range.end);
2770
2771 let new_text_len = rng.gen_range(0..10);
2772 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2773 new_text = new_text.to_uppercase();
2774
2775 edits.push((range, new_text));
2776 }
2777 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2778 self.edit(edits, None, cx);
2779 }
2780
2781 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2782 let was_dirty = self.is_dirty();
2783 let old_version = self.version.clone();
2784
2785 let ops = self.text.randomly_undo_redo(rng);
2786 if !ops.is_empty() {
2787 for op in ops {
2788 self.send_operation(Operation::Buffer(op), true, cx);
2789 self.did_edit(&old_version, was_dirty, cx);
2790 }
2791 }
2792 }
2793}
2794
2795impl EventEmitter<BufferEvent> for Buffer {}
2796
2797impl Deref for Buffer {
2798 type Target = TextBuffer;
2799
2800 fn deref(&self) -> &Self::Target {
2801 &self.text
2802 }
2803}
2804
2805impl BufferSnapshot {
2806 /// Returns the [`IndentSize`] of the given line, based on the line's
2807 /// existing indentation.
2808 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2809 indent_size_for_line(self, row)
2810 }
2811
2812 /// Returns [`IndentSize`] for a given position that respects user settings
2813 /// and language preferences.
2814 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2815 let settings = language_settings(
2816 self.language_at(position).map(|l| l.name()),
2817 self.file(),
2818 cx,
2819 );
2820 if settings.hard_tabs {
2821 IndentSize::tab()
2822 } else {
2823 IndentSize::spaces(settings.tab_size.get())
2824 }
2825 }
2826
2827 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2828 /// is passed in as `single_indent_size`.
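///
/// A sketch, assuming `snapshot` is a [`BufferSnapshot`] and a 4-space indentation unit:
///
/// ```ignore
/// let suggestions = snapshot.suggested_indents(2..5, IndentSize::spaces(4));
/// for (row, indent) in suggestions {
///     println!("row {row} should be indented {} columns", indent.len);
/// }
/// ```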
2829 pub fn suggested_indents(
2830 &self,
2831 rows: impl Iterator<Item = u32>,
2832 single_indent_size: IndentSize,
2833 ) -> BTreeMap<u32, IndentSize> {
2834 let mut result = BTreeMap::new();
2835
2836 for row_range in contiguous_ranges(rows, 10) {
2837 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2838 Some(suggestions) => suggestions,
2839 _ => break,
2840 };
2841
2842 for (row, suggestion) in row_range.zip(suggestions) {
2843 let indent_size = if let Some(suggestion) = suggestion {
2844 result
2845 .get(&suggestion.basis_row)
2846 .copied()
2847 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2848 .with_delta(suggestion.delta, single_indent_size)
2849 } else {
2850 self.indent_size_for_line(row)
2851 };
2852
2853 result.insert(row, indent_size);
2854 }
2855 }
2856
2857 result
2858 }
2859
2860 fn suggest_autoindents(
2861 &self,
2862 row_range: Range<u32>,
2863 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2864 let config = &self.language.as_ref()?.config;
2865 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2866
2867 // Find the suggested indentation ranges based on the syntax tree.
2868 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2869 let end = Point::new(row_range.end, 0);
2870 let range = (start..end).to_offset(&self.text);
2871 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2872 Some(&grammar.indents_config.as_ref()?.query)
2873 });
2874 let indent_configs = matches
2875 .grammars()
2876 .iter()
2877 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2878 .collect::<Vec<_>>();
2879
2880 let mut indent_ranges = Vec::<Range<Point>>::new();
2881 let mut outdent_positions = Vec::<Point>::new();
2882 while let Some(mat) = matches.peek() {
2883 let mut start: Option<Point> = None;
2884 let mut end: Option<Point> = None;
2885
2886 let config = &indent_configs[mat.grammar_index];
2887 for capture in mat.captures {
2888 if capture.index == config.indent_capture_ix {
2889 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2890 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2891 } else if Some(capture.index) == config.start_capture_ix {
2892 start = Some(Point::from_ts_point(capture.node.end_position()));
2893 } else if Some(capture.index) == config.end_capture_ix {
2894 end = Some(Point::from_ts_point(capture.node.start_position()));
2895 } else if Some(capture.index) == config.outdent_capture_ix {
2896 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2897 }
2898 }
2899
2900 matches.advance();
2901 if let Some((start, end)) = start.zip(end) {
2902 if start.row == end.row {
2903 continue;
2904 }
2905
2906 let range = start..end;
2907 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2908 Err(ix) => indent_ranges.insert(ix, range),
2909 Ok(ix) => {
2910 let prev_range = &mut indent_ranges[ix];
2911 prev_range.end = prev_range.end.max(range.end);
2912 }
2913 }
2914 }
2915 }
2916
2917 let mut error_ranges = Vec::<Range<Point>>::new();
2918 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2919 grammar.error_query.as_ref()
2920 });
2921 while let Some(mat) = matches.peek() {
2922 let node = mat.captures[0].node;
2923 let start = Point::from_ts_point(node.start_position());
2924 let end = Point::from_ts_point(node.end_position());
2925 let range = start..end;
2926 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2927 Ok(ix) | Err(ix) => ix,
2928 };
2929 let mut end_ix = ix;
2930 while let Some(existing_range) = error_ranges.get(end_ix) {
2931 if existing_range.end < end {
2932 end_ix += 1;
2933 } else {
2934 break;
2935 }
2936 }
2937 error_ranges.splice(ix..end_ix, [range]);
2938 matches.advance();
2939 }
2940
2941 outdent_positions.sort();
2942 for outdent_position in outdent_positions {
2943 // Find the innermost indent range containing this outdent position,
2944 // and set its end to the outdent position.
2945 if let Some(range_to_truncate) = indent_ranges
2946 .iter_mut()
2947 .filter(|indent_range| indent_range.contains(&outdent_position))
2948 .next_back()
2949 {
2950 range_to_truncate.end = outdent_position;
2951 }
2952 }
2953
2954 // Find the suggested indentation increases and decreases based on regexes.
2955 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2956 self.for_each_line(
2957 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2958 ..Point::new(row_range.end, 0),
2959 |row, line| {
2960 if config
2961 .decrease_indent_pattern
2962 .as_ref()
2963 .map_or(false, |regex| regex.is_match(line))
2964 {
2965 indent_change_rows.push((row, Ordering::Less));
2966 }
2967 if config
2968 .increase_indent_pattern
2969 .as_ref()
2970 .map_or(false, |regex| regex.is_match(line))
2971 {
2972 indent_change_rows.push((row + 1, Ordering::Greater));
2973 }
2974 },
2975 );
2976
2977 let mut indent_changes = indent_change_rows.into_iter().peekable();
2978 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2979 prev_non_blank_row.unwrap_or(0)
2980 } else {
2981 row_range.start.saturating_sub(1)
2982 };
2983 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2984 Some(row_range.map(move |row| {
2985 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2986
2987 let mut indent_from_prev_row = false;
2988 let mut outdent_from_prev_row = false;
2989 let mut outdent_to_row = u32::MAX;
2990 let mut from_regex = false;
2991
2992 while let Some((indent_row, delta)) = indent_changes.peek() {
2993 match indent_row.cmp(&row) {
2994 Ordering::Equal => match delta {
2995 Ordering::Less => {
2996 from_regex = true;
2997 outdent_from_prev_row = true
2998 }
2999 Ordering::Greater => {
3000 indent_from_prev_row = true;
3001 from_regex = true
3002 }
3003 _ => {}
3004 },
3005
3006 Ordering::Greater => break,
3007 Ordering::Less => {}
3008 }
3009
3010 indent_changes.next();
3011 }
3012
3013 for range in &indent_ranges {
3014 if range.start.row >= row {
3015 break;
3016 }
3017 if range.start.row == prev_row && range.end > row_start {
3018 indent_from_prev_row = true;
3019 }
3020 if range.end > prev_row_start && range.end <= row_start {
3021 outdent_to_row = outdent_to_row.min(range.start.row);
3022 }
3023 }
3024
3025 let within_error = error_ranges
3026 .iter()
3027 .any(|e| e.start.row < row && e.end > row_start);
3028
3029 let suggestion = if outdent_to_row == prev_row
3030 || (outdent_from_prev_row && indent_from_prev_row)
3031 {
3032 Some(IndentSuggestion {
3033 basis_row: prev_row,
3034 delta: Ordering::Equal,
3035 within_error: within_error && !from_regex,
3036 })
3037 } else if indent_from_prev_row {
3038 Some(IndentSuggestion {
3039 basis_row: prev_row,
3040 delta: Ordering::Greater,
3041 within_error: within_error && !from_regex,
3042 })
3043 } else if outdent_to_row < prev_row {
3044 Some(IndentSuggestion {
3045 basis_row: outdent_to_row,
3046 delta: Ordering::Equal,
3047 within_error: within_error && !from_regex,
3048 })
3049 } else if outdent_from_prev_row {
3050 Some(IndentSuggestion {
3051 basis_row: prev_row,
3052 delta: Ordering::Less,
3053 within_error: within_error && !from_regex,
3054 })
3055 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3056 {
3057 Some(IndentSuggestion {
3058 basis_row: prev_row,
3059 delta: Ordering::Equal,
3060 within_error: within_error && !from_regex,
3061 })
3062 } else {
3063 None
3064 };
3065
3066 prev_row = row;
3067 prev_row_start = row_start;
3068 suggestion
3069 }))
3070 }
3071
3072 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3073 while row > 0 {
3074 row -= 1;
3075 if !self.is_line_blank(row) {
3076 return Some(row);
3077 }
3078 }
3079 None
3080 }
3081
3082 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3083 let captures = self.syntax.captures(range, &self.text, |grammar| {
3084 grammar.highlights_query.as_ref()
3085 });
3086 let highlight_maps = captures
3087 .grammars()
3088 .iter()
3089 .map(|grammar| grammar.highlight_map())
3090 .collect();
3091 (captures, highlight_maps)
3092 }
3093
3094 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3095 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3096 /// returned in chunks where each chunk has a single syntax highlighting style and
3097 /// diagnostic status.
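///
/// A sketch of reassembling the text from language-aware chunks (the `text` field on
/// each chunk is assumed here for illustration; `snapshot` is a [`BufferSnapshot`]):
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     text.push_str(chunk.text);
/// }
/// assert_eq!(text, snapshot.text());
/// ```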
3098 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3099 let range = range.start.to_offset(self)..range.end.to_offset(self);
3100
3101 let mut syntax = None;
3102 if language_aware {
3103 syntax = Some(self.get_highlights(range.clone()));
3104 }
3105 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3106 let diagnostics = language_aware;
3107 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3108 }
3109
3110 pub fn highlighted_text_for_range<T: ToOffset>(
3111 &self,
3112 range: Range<T>,
3113 override_style: Option<HighlightStyle>,
3114 syntax_theme: &SyntaxTheme,
3115 ) -> HighlightedText {
3116 HighlightedText::from_buffer_range(
3117 range,
3118 &self.text,
3119 &self.syntax,
3120 override_style,
3121 syntax_theme,
3122 )
3123 }
3124
3125 /// Invokes the given callback for each line of text in the given range of the buffer.
3126 /// Uses a callback to avoid allocating a string for each line.
3127 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3128 let mut line = String::new();
3129 let mut row = range.start.row;
3130 for chunk in self
3131 .as_rope()
3132 .chunks_in_range(range.to_offset(self))
3133 .chain(["\n"])
3134 {
3135 for (newline_ix, text) in chunk.split('\n').enumerate() {
3136 if newline_ix > 0 {
3137 callback(row, &line);
3138 row += 1;
3139 line.clear();
3140 }
3141 line.push_str(text);
3142 }
3143 }
3144 }
3145
3146 /// Iterates over every [`SyntaxLayer`] in the buffer.
3147 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3148 self.syntax
3149 .layers_for_range(0..self.len(), &self.text, true)
3150 }
3151
3152 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3153 let offset = position.to_offset(self);
3154 self.syntax
3155 .layers_for_range(offset..offset, &self.text, false)
3156 .filter(|l| l.node().end_byte() > offset)
3157 .last()
3158 }
3159
3160 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3161 &self,
3162 range: Range<D>,
3163 ) -> Option<SyntaxLayer> {
3164 let range = range.to_offset(self);
3165 return self
3166 .syntax
3167 .layers_for_range(range, &self.text, false)
3168 .max_by(|a, b| {
3169 if a.depth != b.depth {
3170 a.depth.cmp(&b.depth)
3171 } else if a.offset.0 != b.offset.0 {
3172 a.offset.0.cmp(&b.offset.0)
3173 } else {
3174 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3175 }
3176 });
3177 }
3178
3179 /// Returns the main [`Language`].
3180 pub fn language(&self) -> Option<&Arc<Language>> {
3181 self.language.as_ref()
3182 }
3183
3184 /// Returns the [`Language`] at the given location.
3185 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3186 self.syntax_layer_at(position)
3187 .map(|info| info.language)
3188 .or(self.language.as_ref())
3189 }
3190
3191 /// Returns the settings for the language at the given location.
3192 pub fn settings_at<'a, D: ToOffset>(
3193 &'a self,
3194 position: D,
3195 cx: &'a App,
3196 ) -> Cow<'a, LanguageSettings> {
3197 language_settings(
3198 self.language_at(position).map(|l| l.name()),
3199 self.file.as_ref(),
3200 cx,
3201 )
3202 }
3203
3204 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3205 CharClassifier::new(self.language_scope_at(point))
3206 }
3207
3208 /// Returns the [`LanguageScope`] at the given location.
3209 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3210 let offset = position.to_offset(self);
3211 let mut scope = None;
3212 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3213
3214 // Use the layer that has the smallest node intersecting the given point.
3215 for layer in self
3216 .syntax
3217 .layers_for_range(offset..offset, &self.text, false)
3218 {
3219 let mut cursor = layer.node().walk();
3220
3221 let mut range = None;
3222 loop {
3223 let child_range = cursor.node().byte_range();
3224 if !child_range.contains(&offset) {
3225 break;
3226 }
3227
3228 range = Some(child_range);
3229 if cursor.goto_first_child_for_byte(offset).is_none() {
3230 break;
3231 }
3232 }
3233
3234 if let Some(range) = range {
3235 if smallest_range_and_depth.as_ref().map_or(
3236 true,
3237 |(smallest_range, smallest_range_depth)| {
3238 if layer.depth > *smallest_range_depth {
3239 true
3240 } else if layer.depth == *smallest_range_depth {
3241 range.len() < smallest_range.len()
3242 } else {
3243 false
3244 }
3245 },
3246 ) {
3247 smallest_range_and_depth = Some((range, layer.depth));
3248 scope = Some(LanguageScope {
3249 language: layer.language.clone(),
3250 override_id: layer.override_id(offset, &self.text),
3251 });
3252 }
3253 }
3254 }
3255
3256 scope.or_else(|| {
3257 self.language.clone().map(|language| LanguageScope {
3258 language,
3259 override_id: None,
3260 })
3261 })
3262 }
3263
3264 /// Returns a tuple of the range and character kind of the word
3265 /// surrounding the given position.
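///
/// A sketch with hypothetical contents `"let foo_bar = 1;"`: an offset inside
/// `foo_bar` yields that word's byte range.
///
/// ```ignore
/// let (range, _kind) = snapshot.surrounding_word(6);
/// assert_eq!(range, 4..11);
/// ```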
3266 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3267 let mut start = start.to_offset(self);
3268 let mut end = start;
3269 let mut next_chars = self.chars_at(start).peekable();
3270 let mut prev_chars = self.reversed_chars_at(start).peekable();
3271
3272 let classifier = self.char_classifier_at(start);
3273 let word_kind = cmp::max(
3274 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3275 next_chars.peek().copied().map(|c| classifier.kind(c)),
3276 );
3277
3278 for ch in prev_chars {
3279 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3280 start -= ch.len_utf8();
3281 } else {
3282 break;
3283 }
3284 }
3285
3286 for ch in next_chars {
3287 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3288 end += ch.len_utf8();
3289 } else {
3290 break;
3291 }
3292 }
3293
3294 (start..end, word_kind)
3295 }
3296
3297 /// Returns the closest syntax node enclosing the given range.
3298 pub fn syntax_ancestor<'a, T: ToOffset>(
3299 &'a self,
3300 range: Range<T>,
3301 ) -> Option<tree_sitter::Node<'a>> {
3302 let range = range.start.to_offset(self)..range.end.to_offset(self);
3303 let mut result: Option<tree_sitter::Node<'a>> = None;
3304 'outer: for layer in self
3305 .syntax
3306 .layers_for_range(range.clone(), &self.text, true)
3307 {
3308 let mut cursor = layer.node().walk();
3309
3310 // Descend to the first leaf that touches the start of the range.
3311 //
3312 // If the range is non-empty and the current node ends exactly at the start,
3313 // move to the next sibling to find a node that extends beyond the start.
3314 //
3315 // If the range is empty and the current node starts after the range position,
3316 // move to the previous sibling to find the node that contains the position.
3317 while cursor.goto_first_child_for_byte(range.start).is_some() {
3318 if !range.is_empty() && cursor.node().end_byte() == range.start {
3319 cursor.goto_next_sibling();
3320 }
3321 if range.is_empty() && cursor.node().start_byte() > range.start {
3322 cursor.goto_previous_sibling();
3323 }
3324 }
3325
3326 // Ascend to the smallest ancestor that strictly contains the range.
3327 loop {
3328 let node_range = cursor.node().byte_range();
3329 if node_range.start <= range.start
3330 && node_range.end >= range.end
3331 && node_range.len() > range.len()
3332 {
3333 break;
3334 }
3335 if !cursor.goto_parent() {
3336 continue 'outer;
3337 }
3338 }
3339
3340 let left_node = cursor.node();
3341 let mut layer_result = left_node;
3342
3343 // For an empty range, try to find another node immediately to the right of the range.
3344 if left_node.end_byte() == range.start {
3345 let mut right_node = None;
3346 while !cursor.goto_next_sibling() {
3347 if !cursor.goto_parent() {
3348 break;
3349 }
3350 }
3351
3352 while cursor.node().start_byte() == range.start {
3353 right_node = Some(cursor.node());
3354 if !cursor.goto_first_child() {
3355 break;
3356 }
3357 }
3358
3359 // If there is a candidate node on both sides of the (empty) range, then
3360 // decide between the two by favoring a named node over an anonymous token.
3361 // If both nodes are the same in that regard, favor the right one.
3362 if let Some(right_node) = right_node {
3363 if right_node.is_named() || !left_node.is_named() {
3364 layer_result = right_node;
3365 }
3366 }
3367 }
3368
3369 if let Some(previous_result) = &result {
3370 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3371 continue;
3372 }
3373 }
3374 result = Some(layer_result);
3375 }
3376
3377 result
3378 }
3379
3380 /// Returns the outline for the buffer.
3381 ///
3382 /// This method allows passing an optional [`SyntaxTheme`] to
3383 /// syntax-highlight the returned symbols.
3384 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3385 self.outline_items_containing(0..self.len(), true, theme)
3386 .map(Outline::new)
3387 }
3388
3389 /// Returns all the symbols that contain the given position.
3390 ///
3391 /// This method allows passing an optional [`SyntaxTheme`] to
3392 /// syntax-highlight the returned symbols.
3393 pub fn symbols_containing<T: ToOffset>(
3394 &self,
3395 position: T,
3396 theme: Option<&SyntaxTheme>,
3397 ) -> Option<Vec<OutlineItem<Anchor>>> {
3398 let position = position.to_offset(self);
3399 let mut items = self.outline_items_containing(
3400 position.saturating_sub(1)..self.len().min(position + 1),
3401 false,
3402 theme,
3403 )?;
3404 let mut prev_depth = None;
3405 items.retain(|item| {
3406 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3407 prev_depth = Some(item.depth);
3408 result
3409 });
3410 Some(items)
3411 }
3412
3413 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3414 let range = range.to_offset(self);
3415 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3416 grammar.outline_config.as_ref().map(|c| &c.query)
3417 });
3418 let configs = matches
3419 .grammars()
3420 .iter()
3421 .map(|g| g.outline_config.as_ref().unwrap())
3422 .collect::<Vec<_>>();
3423
3424 while let Some(mat) = matches.peek() {
3425 let config = &configs[mat.grammar_index];
3426 let containing_item_node = maybe!({
3427 let item_node = mat.captures.iter().find_map(|cap| {
3428 if cap.index == config.item_capture_ix {
3429 Some(cap.node)
3430 } else {
3431 None
3432 }
3433 })?;
3434
3435 let item_byte_range = item_node.byte_range();
3436 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3437 None
3438 } else {
3439 Some(item_node)
3440 }
3441 });
3442
3443 if let Some(item_node) = containing_item_node {
3444 return Some(
3445 Point::from_ts_point(item_node.start_position())
3446 ..Point::from_ts_point(item_node.end_position()),
3447 );
3448 }
3449
3450 matches.advance();
3451 }
3452 None
3453 }
3454
3455 pub fn outline_items_containing<T: ToOffset>(
3456 &self,
3457 range: Range<T>,
3458 include_extra_context: bool,
3459 theme: Option<&SyntaxTheme>,
3460 ) -> Option<Vec<OutlineItem<Anchor>>> {
3461 let range = range.to_offset(self);
3462 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3463 grammar.outline_config.as_ref().map(|c| &c.query)
3464 });
3465 let configs = matches
3466 .grammars()
3467 .iter()
3468 .map(|g| g.outline_config.as_ref().unwrap())
3469 .collect::<Vec<_>>();
3470
3471 let mut items = Vec::new();
3472 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3473 while let Some(mat) = matches.peek() {
3474 let config = &configs[mat.grammar_index];
3475 if let Some(item) =
3476 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3477 {
3478 items.push(item);
3479 } else if let Some(capture) = mat
3480 .captures
3481 .iter()
3482 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3483 {
3484 let capture_range = capture.node.start_position()..capture.node.end_position();
3485 let mut capture_row_range =
3486 capture_range.start.row as u32..capture_range.end.row as u32;
3487 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3488 {
3489 capture_row_range.end -= 1;
3490 }
3491 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3492 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3493 last_row_range.end = capture_row_range.end;
3494 } else {
3495 annotation_row_ranges.push(capture_row_range);
3496 }
3497 } else {
3498 annotation_row_ranges.push(capture_row_range);
3499 }
3500 }
3501 matches.advance();
3502 }
3503
3504 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3505
3506 // Assign depths based on containment relationships and convert to anchors.
3507 let mut item_ends_stack = Vec::<Point>::new();
3508 let mut anchor_items = Vec::new();
3509 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3510 for item in items {
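            // Pop any items that end before this item ends; they cannot contain it, and the remaining stack depth is this item's nesting depth.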
3511 while let Some(last_end) = item_ends_stack.last().copied() {
3512 if last_end < item.range.end {
3513 item_ends_stack.pop();
3514 } else {
3515 break;
3516 }
3517 }
3518
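            // Attach the annotation whose last row immediately precedes this item's first row, if there is one.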
3519 let mut annotation_row_range = None;
3520 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3521 let row_preceding_item = item.range.start.row.saturating_sub(1);
3522 if next_annotation_row_range.end < row_preceding_item {
3523 annotation_row_ranges.next();
3524 } else {
3525 if next_annotation_row_range.end == row_preceding_item {
3526 annotation_row_range = Some(next_annotation_row_range.clone());
3527 annotation_row_ranges.next();
3528 }
3529 break;
3530 }
3531 }
3532
3533 anchor_items.push(OutlineItem {
3534 depth: item_ends_stack.len(),
3535 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3536 text: item.text,
3537 highlight_ranges: item.highlight_ranges,
3538 name_ranges: item.name_ranges,
3539 body_range: item.body_range.map(|body_range| {
3540 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3541 }),
3542 annotation_range: annotation_row_range.map(|annotation_range| {
3543 self.anchor_after(Point::new(annotation_range.start, 0))
3544 ..self.anchor_before(Point::new(
3545 annotation_range.end,
3546 self.line_len(annotation_range.end),
3547 ))
3548 }),
3549 });
3550 item_ends_stack.push(item.range.end);
3551 }
3552
3553 Some(anchor_items)
3554 }
3555
3556 fn next_outline_item(
3557 &self,
3558 config: &OutlineConfig,
3559 mat: &SyntaxMapMatch,
3560 range: &Range<usize>,
3561 include_extra_context: bool,
3562 theme: Option<&SyntaxTheme>,
3563 ) -> Option<OutlineItem<Point>> {
3564 let item_node = mat.captures.iter().find_map(|cap| {
3565 if cap.index == config.item_capture_ix {
3566 Some(cap.node)
3567 } else {
3568 None
3569 }
3570 })?;
3571
3572 let item_byte_range = item_node.byte_range();
3573 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3574 return None;
3575 }
3576 let item_point_range = Point::from_ts_point(item_node.start_position())
3577 ..Point::from_ts_point(item_node.end_position());
3578
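        // Gather the byte ranges of the name and context captures, and record the body's open and close points.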
3579 let mut open_point = None;
3580 let mut close_point = None;
3581 let mut buffer_ranges = Vec::new();
3582 for capture in mat.captures {
3583 let node_is_name;
3584 if capture.index == config.name_capture_ix {
3585 node_is_name = true;
3586 } else if Some(capture.index) == config.context_capture_ix
3587 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3588 {
3589 node_is_name = false;
3590 } else {
3591 if Some(capture.index) == config.open_capture_ix {
3592 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3593 } else if Some(capture.index) == config.close_capture_ix {
3594 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3595 }
3596
3597 continue;
3598 }
3599
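            // If the capture spans multiple lines, truncate it to the end of its first line.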
3600 let mut range = capture.node.start_byte()..capture.node.end_byte();
3601 let start = capture.node.start_position();
3602 if capture.node.end_position().row > start.row {
3603 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3604 }
3605
3606 if !range.is_empty() {
3607 buffer_ranges.push((range, node_is_name));
3608 }
3609 }
3610 if buffer_ranges.is_empty() {
3611 return None;
3612 }
3613 let mut text = String::new();
3614 let mut highlight_ranges = Vec::new();
3615 let mut name_ranges = Vec::new();
3616 let mut chunks = self.chunks(
3617 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3618 true,
3619 );
3620 let mut last_buffer_range_end = 0;
3621
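        // Build the item's display text by concatenating the captured ranges, inserting a space between
        // non-adjacent ranges and recording highlight and name ranges as we go.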
3622 for (buffer_range, is_name) in buffer_ranges {
3623 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3624 if space_added {
3625 text.push(' ');
3626 }
3627 let before_append_len = text.len();
3628 let mut offset = buffer_range.start;
3629 chunks.seek(buffer_range.clone());
3630 for mut chunk in chunks.by_ref() {
3631 if chunk.text.len() > buffer_range.end - offset {
3632 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3633 offset = buffer_range.end;
3634 } else {
3635 offset += chunk.text.len();
3636 }
3637 let style = chunk
3638 .syntax_highlight_id
3639 .zip(theme)
3640 .and_then(|(highlight, theme)| highlight.style(theme));
3641 if let Some(style) = style {
3642 let start = text.len();
3643 let end = start + chunk.text.len();
3644 highlight_ranges.push((start..end, style));
3645 }
3646 text.push_str(chunk.text);
3647 if offset >= buffer_range.end {
3648 break;
3649 }
3650 }
3651 if is_name {
3652 let after_append_len = text.len();
3653 let start = if space_added && !name_ranges.is_empty() {
3654 before_append_len - 1
3655 } else {
3656 before_append_len
3657 };
3658 name_ranges.push(start..after_append_len);
3659 }
3660 last_buffer_range_end = buffer_range.end;
3661 }
3662
3663 Some(OutlineItem {
3664 depth: 0, // We'll calculate the depth later
3665 range: item_point_range,
3666 text,
3667 highlight_ranges,
3668 name_ranges,
3669 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3670 annotation_range: None,
3671 })
3672 }
3673
3674 pub fn function_body_fold_ranges<T: ToOffset>(
3675 &self,
3676 within: Range<T>,
3677 ) -> impl Iterator<Item = Range<usize>> + '_ {
3678 self.text_object_ranges(within, TreeSitterOptions::default())
3679 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3680 }
3681
3682 /// For each grammar in the language, runs the provided
3683 /// [`tree_sitter::Query`] against the given range.
3684 pub fn matches(
3685 &self,
3686 range: Range<usize>,
3687 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3688 ) -> SyntaxMapMatches {
3689 self.syntax.matches(range, self, query)
3690 }
3691
3692 pub fn all_bracket_ranges(
3693 &self,
3694 range: Range<usize>,
3695 ) -> impl Iterator<Item = BracketMatch> + '_ {
3696 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3697 grammar.brackets_config.as_ref().map(|c| &c.query)
3698 });
3699 let configs = matches
3700 .grammars()
3701 .iter()
3702 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3703 .collect::<Vec<_>>();
3704
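        // Lazily yield each matched open/close pair whose combined span overlaps the requested range.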
3705 iter::from_fn(move || {
3706 while let Some(mat) = matches.peek() {
3707 let mut open = None;
3708 let mut close = None;
3709 let config = &configs[mat.grammar_index];
3710 let pattern = &config.patterns[mat.pattern_index];
3711 for capture in mat.captures {
3712 if capture.index == config.open_capture_ix {
3713 open = Some(capture.node.byte_range());
3714 } else if capture.index == config.close_capture_ix {
3715 close = Some(capture.node.byte_range());
3716 }
3717 }
3718
3719 matches.advance();
3720
3721 let Some((open_range, close_range)) = open.zip(close) else {
3722 continue;
3723 };
3724
3725 let bracket_range = open_range.start..=close_range.end;
3726 if !bracket_range.overlaps(&range) {
3727 continue;
3728 }
3729
3730 return Some(BracketMatch {
3731 open_range,
3732 close_range,
3733 newline_only: pattern.newline_only,
3734 });
3735 }
3736 None
3737 })
3738 }
3739
    /// Returns bracket range pairs overlapping or adjacent to `range`.
3741 pub fn bracket_ranges<T: ToOffset>(
3742 &self,
3743 range: Range<T>,
3744 ) -> impl Iterator<Item = BracketMatch> + '_ {
3745 // Find bracket pairs that *inclusively* contain the given range.
3746 let range = range.start.to_offset(self).saturating_sub(1)
3747 ..self.len().min(range.end.to_offset(self) + 1);
3748 self.all_bracket_ranges(range)
3749 .filter(|pair| !pair.newline_only)
3750 }
3751
3752 pub fn text_object_ranges<T: ToOffset>(
3753 &self,
3754 range: Range<T>,
3755 options: TreeSitterOptions,
3756 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3757 let range = range.start.to_offset(self).saturating_sub(1)
3758 ..self.len().min(range.end.to_offset(self) + 1);
3759
3760 let mut matches =
3761 self.syntax
3762 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3763 grammar.text_object_config.as_ref().map(|c| &c.query)
3764 });
3765
3766 let configs = matches
3767 .grammars()
3768 .iter()
3769 .map(|grammar| grammar.text_object_config.as_ref())
3770 .collect::<Vec<_>>();
3771
3772 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3773
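        // Yield pending captures that overlap the range; when none remain, pull the next match and merge
        // captures of the same text object into a single range.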
3774 iter::from_fn(move || {
3775 loop {
3776 while let Some(capture) = captures.pop() {
3777 if capture.0.overlaps(&range) {
3778 return Some(capture);
3779 }
3780 }
3781
3782 let mat = matches.peek()?;
3783
3784 let Some(config) = configs[mat.grammar_index].as_ref() else {
3785 matches.advance();
3786 continue;
3787 };
3788
3789 for capture in mat.captures {
3790 let Some(ix) = config
3791 .text_objects_by_capture_ix
3792 .binary_search_by_key(&capture.index, |e| e.0)
3793 .ok()
3794 else {
3795 continue;
3796 };
3797 let text_object = config.text_objects_by_capture_ix[ix].1;
3798 let byte_range = capture.node.byte_range();
3799
3800 let mut found = false;
3801 for (range, existing) in captures.iter_mut() {
3802 if existing == &text_object {
3803 range.start = range.start.min(byte_range.start);
3804 range.end = range.end.max(byte_range.end);
3805 found = true;
3806 break;
3807 }
3808 }
3809
3810 if !found {
3811 captures.push((byte_range, text_object));
3812 }
3813 }
3814
3815 matches.advance();
3816 }
3817 })
3818 }
3819
    /// Returns the enclosing bracket ranges containing the given range.
3821 pub fn enclosing_bracket_ranges<T: ToOffset>(
3822 &self,
3823 range: Range<T>,
3824 ) -> impl Iterator<Item = BracketMatch> + '_ {
3825 let range = range.start.to_offset(self)..range.end.to_offset(self);
3826
3827 self.bracket_ranges(range.clone()).filter(move |pair| {
3828 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3829 })
3830 }
3831
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3835 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3836 &self,
3837 range: Range<T>,
3838 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3839 ) -> Option<(Range<usize>, Range<usize>)> {
3840 let range = range.start.to_offset(self)..range.end.to_offset(self);
3841
3842 // Get the ranges of the innermost pair of brackets.
3843 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3844
3845 for pair in self.enclosing_bracket_ranges(range.clone()) {
3846 if let Some(range_filter) = range_filter {
3847 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3848 continue;
3849 }
3850 }
3851
3852 let len = pair.close_range.end - pair.open_range.start;
3853
3854 if let Some((existing_open, existing_close)) = &result {
3855 let existing_len = existing_close.end - existing_open.start;
3856 if len > existing_len {
3857 continue;
3858 }
3859 }
3860
3861 result = Some((pair.open_range, pair.close_range));
3862 }
3863
3864 result
3865 }
3866
3867 /// Returns anchor ranges for any matches of the redaction query.
3868 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3869 /// will be run on the relevant section of the buffer.
3870 pub fn redacted_ranges<T: ToOffset>(
3871 &self,
3872 range: Range<T>,
3873 ) -> impl Iterator<Item = Range<usize>> + '_ {
3874 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3875 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3876 grammar
3877 .redactions_config
3878 .as_ref()
3879 .map(|config| &config.query)
3880 });
3881
3882 let configs = syntax_matches
3883 .grammars()
3884 .iter()
3885 .map(|grammar| grammar.redactions_config.as_ref())
3886 .collect::<Vec<_>>();
3887
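        // Yield the byte range of the redaction capture for each successive match.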
3888 iter::from_fn(move || {
3889 let redacted_range = syntax_matches
3890 .peek()
3891 .and_then(|mat| {
3892 configs[mat.grammar_index].and_then(|config| {
3893 mat.captures
3894 .iter()
3895 .find(|capture| capture.index == config.redaction_capture_ix)
3896 })
3897 })
3898 .map(|mat| mat.node.byte_range());
3899 syntax_matches.advance();
3900 redacted_range
3901 })
3902 }
3903
3904 pub fn injections_intersecting_range<T: ToOffset>(
3905 &self,
3906 range: Range<T>,
3907 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3908 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3909
3910 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3911 grammar
3912 .injection_config
3913 .as_ref()
3914 .map(|config| &config.query)
3915 });
3916
3917 let configs = syntax_matches
3918 .grammars()
3919 .iter()
3920 .map(|grammar| grammar.injection_config.as_ref())
3921 .collect::<Vec<_>>();
3922
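        // For each match, yield the injected content range together with the language active at its start.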
3923 iter::from_fn(move || {
3924 let ranges = syntax_matches.peek().and_then(|mat| {
3925 let config = &configs[mat.grammar_index]?;
3926 let content_capture_range = mat.captures.iter().find_map(|capture| {
3927 if capture.index == config.content_capture_ix {
3928 Some(capture.node.byte_range())
3929 } else {
3930 None
3931 }
3932 })?;
3933 let language = self.language_at(content_capture_range.start)?;
3934 Some((content_capture_range, language))
3935 });
3936 syntax_matches.advance();
3937 ranges
3938 })
3939 }
3940
3941 pub fn debug_variable_ranges(
3942 &self,
3943 offset_range: Range<usize>,
3944 ) -> impl Iterator<Item = DebugVariableRanges> + '_ {
3945 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3946 grammar
3947 .debug_variables_config
3948 .as_ref()
3949 .map(|config| &config.query)
3950 });
3951
3952 let configs = syntax_matches
3953 .grammars()
3954 .iter()
3955 .map(|grammar| grammar.debug_variables_config.as_ref())
3956 .collect::<Vec<_>>();
3957
3958 iter::from_fn(move || {
3959 loop {
3960 let mat = syntax_matches.peek()?;
3961
3962 let variable_ranges = configs[mat.grammar_index].and_then(|config| {
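                    // Compute the smallest byte range spanning all of this match's captures.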
3963 let full_range = mat.captures.iter().fold(
3964 Range {
3965 start: usize::MAX,
3966 end: 0,
3967 },
3968 |mut acc, next| {
3969 let byte_range = next.node.byte_range();
3970 if acc.start > byte_range.start {
3971 acc.start = byte_range.start;
3972 }
3973 if acc.end < byte_range.end {
3974 acc.end = byte_range.end;
3975 }
3976 acc
3977 },
3978 );
3979 if full_range.start > full_range.end {
3980 // We did not find a full spanning range of this match.
3981 return None;
3982 }
3983
3984 let captures = mat.captures.iter().filter_map(|capture| {
3985 Some((
3986 capture,
3987 config.captures.get(capture.index as usize).cloned()?,
3988 ))
3989 });
3990
3991 let mut variable_range = None;
3992 for (query, capture) in captures {
3993 if let DebugVariableCapture::Variable = capture {
3994 let _ = variable_range.insert(query.node.byte_range());
3995 }
3996 }
3997
3998 Some(DebugVariableRanges {
3999 buffer_id: self.remote_id(),
4000 range: variable_range?,
4001 })
4002 });
4003
4004 syntax_matches.advance();
4005 if variable_ranges.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. We don't want to end this
                    // iterator just because a match had no variable capture, so we loop around to the next match.
4008 return variable_ranges;
4009 }
4010 }
4011 })
4012 }
4013
4014 pub fn runnable_ranges(
4015 &self,
4016 offset_range: Range<usize>,
4017 ) -> impl Iterator<Item = RunnableRange> + '_ {
4018 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4019 grammar.runnable_config.as_ref().map(|config| &config.query)
4020 });
4021
4022 let test_configs = syntax_matches
4023 .grammars()
4024 .iter()
4025 .map(|grammar| grammar.runnable_config.as_ref())
4026 .collect::<Vec<_>>();
4027
4028 iter::from_fn(move || {
4029 loop {
4030 let mat = syntax_matches.peek()?;
4031
4032 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4033 let mut run_range = None;
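                    // Compute the smallest byte range spanning all of this match's captures.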
4034 let full_range = mat.captures.iter().fold(
4035 Range {
4036 start: usize::MAX,
4037 end: 0,
4038 },
4039 |mut acc, next| {
4040 let byte_range = next.node.byte_range();
4041 if acc.start > byte_range.start {
4042 acc.start = byte_range.start;
4043 }
4044 if acc.end < byte_range.end {
4045 acc.end = byte_range.end;
4046 }
4047 acc
4048 },
4049 );
4050 if full_range.start > full_range.end {
4051 // We did not find a full spanning range of this match.
4052 return None;
4053 }
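                    // Collect named captures as (byte range, name) pairs, and record the byte range of the run marker.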
4054 let extra_captures: SmallVec<[_; 1]> =
4055 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4056 test_configs
4057 .extra_captures
4058 .get(capture.index as usize)
4059 .cloned()
4060 .and_then(|tag_name| match tag_name {
4061 RunnableCapture::Named(name) => {
4062 Some((capture.node.byte_range(), name))
4063 }
4064 RunnableCapture::Run => {
4065 let _ = run_range.insert(capture.node.byte_range());
4066 None
4067 }
4068 })
4069 }));
4070 let run_range = run_range?;
4071 let tags = test_configs
4072 .query
4073 .property_settings(mat.pattern_index)
4074 .iter()
4075 .filter_map(|property| {
4076 if *property.key == *"tag" {
4077 property
4078 .value
4079 .as_ref()
4080 .map(|value| RunnableTag(value.to_string().into()))
4081 } else {
4082 None
4083 }
4084 })
4085 .collect();
4086 let extra_captures = extra_captures
4087 .into_iter()
4088 .map(|(range, name)| {
4089 (
4090 name.to_string(),
4091 self.text_for_range(range.clone()).collect::<String>(),
4092 )
4093 })
4094 .collect();
4095 // All tags should have the same range.
4096 Some(RunnableRange {
4097 run_range,
4098 full_range,
4099 runnable: Runnable {
4100 tags,
4101 language: mat.language,
4102 buffer: self.remote_id(),
4103 },
4104 extra_captures,
4105 buffer_id: self.remote_id(),
4106 })
4107 });
4108
4109 syntax_matches.advance();
4110 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. We don't want to end this
                    // iterator just because a match had no run marker, so we loop around to the next match.
4113 return test_range;
4114 }
4115 }
4116 })
4117 }
4118
    /// Returns the selections of remote peers (and, when `include_local` is true,
    /// of the local replica) that intersect the given range.
4120 #[allow(clippy::type_complexity)]
4121 pub fn selections_in_range(
4122 &self,
4123 range: Range<Anchor>,
4124 include_local: bool,
4125 ) -> impl Iterator<
4126 Item = (
4127 ReplicaId,
4128 bool,
4129 CursorShape,
4130 impl Iterator<Item = &Selection<Anchor>> + '_,
4131 ),
4132 > + '_ {
4133 self.remote_selections
4134 .iter()
4135 .filter(move |(replica_id, set)| {
4136 (include_local || **replica_id != self.text.replica_id())
4137 && !set.selections.is_empty()
4138 })
4139 .map(move |(replica_id, set)| {
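                // Find the slice of selections that can intersect the range: from the first selection whose end
                // is at or after the range's start, up to the last whose start is at or before the range's end.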
4140 let start_ix = match set.selections.binary_search_by(|probe| {
4141 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4142 }) {
4143 Ok(ix) | Err(ix) => ix,
4144 };
4145 let end_ix = match set.selections.binary_search_by(|probe| {
4146 probe.start.cmp(&range.end, self).then(Ordering::Less)
4147 }) {
4148 Ok(ix) | Err(ix) => ix,
4149 };
4150
4151 (
4152 *replica_id,
4153 set.line_mode,
4154 set.cursor_shape,
4155 set.selections[start_ix..end_ix].iter(),
4156 )
4157 })
4158 }
4159
    /// Returns whether the buffer contains any diagnostics.
4161 pub fn has_diagnostics(&self) -> bool {
4162 !self.diagnostics.is_empty()
4163 }
4164
4165 /// Returns all the diagnostics intersecting the given range.
4166 pub fn diagnostics_in_range<'a, T, O>(
4167 &'a self,
4168 search_range: Range<T>,
4169 reversed: bool,
4170 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4171 where
4172 T: 'a + Clone + ToOffset,
4173 O: 'a + FromAnchor,
4174 {
4175 let mut iterators: Vec<_> = self
4176 .diagnostics
4177 .iter()
4178 .map(|(_, collection)| {
4179 collection
4180 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4181 .peekable()
4182 })
4183 .collect();
4184
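        // Merge the per-language-server iterators, always taking the entry that sorts first by start position,
        // then severity, then group id (reversed when iterating backwards).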
4185 std::iter::from_fn(move || {
4186 let (next_ix, _) = iterators
4187 .iter_mut()
4188 .enumerate()
4189 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4190 .min_by(|(_, a), (_, b)| {
4191 let cmp = a
4192 .range
4193 .start
4194 .cmp(&b.range.start, self)
4195 // when range is equal, sort by diagnostic severity
4196 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4197 // and stabilize order with group_id
4198 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4199 if reversed { cmp.reverse() } else { cmp }
4200 })?;
4201 iterators[next_ix]
4202 .next()
4203 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4204 diagnostic,
4205 range: FromAnchor::from_anchor(&range.start, self)
4206 ..FromAnchor::from_anchor(&range.end, self),
4207 })
4208 })
4209 }
4210
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4214 pub fn diagnostic_groups(
4215 &self,
4216 language_server_id: Option<LanguageServerId>,
4217 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4218 let mut groups = Vec::new();
4219
4220 if let Some(language_server_id) = language_server_id {
4221 if let Ok(ix) = self
4222 .diagnostics
4223 .binary_search_by_key(&language_server_id, |e| e.0)
4224 {
4225 self.diagnostics[ix]
4226 .1
4227 .groups(language_server_id, &mut groups, self);
4228 }
4229 } else {
4230 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4231 diagnostics.groups(*language_server_id, &mut groups, self);
4232 }
4233 }
4234
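        // Order groups by the start of their primary diagnostic, breaking ties by language server id.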
4235 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4236 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4237 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4238 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4239 });
4240
4241 groups
4242 }
4243
4244 /// Returns an iterator over the diagnostics for the given group.
4245 pub fn diagnostic_group<O>(
4246 &self,
4247 group_id: usize,
4248 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4249 where
4250 O: FromAnchor + 'static,
4251 {
4252 self.diagnostics
4253 .iter()
4254 .flat_map(move |(_, set)| set.group(group_id, self))
4255 }
4256
4257 /// An integer version number that accounts for all updates besides
4258 /// the buffer's text itself (which is versioned via a version vector).
4259 pub fn non_text_state_update_count(&self) -> usize {
4260 self.non_text_state_update_count
4261 }
4262
    /// Returns a snapshot of the underlying file.
4264 pub fn file(&self) -> Option<&Arc<dyn File>> {
4265 self.file.as_ref()
4266 }
4267
4268 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4269 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4270 if let Some(file) = self.file() {
4271 if file.path().file_name().is_none() || include_root {
4272 Some(file.full_path(cx))
4273 } else {
4274 Some(file.path().to_path_buf())
4275 }
4276 } else {
4277 None
4278 }
4279 }
4280
4281 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4282 let query_str = query.fuzzy_contents;
4283 if query_str.map_or(false, |query| query.is_empty()) {
4284 return BTreeMap::default();
4285 }
4286
4287 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4288 language,
4289 override_id: None,
4290 }));
4291
4292 let mut query_ix = 0;
4293 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4294 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4295
4296 let mut words = BTreeMap::default();
4297 let mut current_word_start_ix = None;
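        // Scan each chunk's characters, tracking the start of the current word and how many query characters have matched so far.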
4298 let mut chunk_ix = query.range.start;
4299 for chunk in self.chunks(query.range, false) {
4300 for (i, c) in chunk.text.char_indices() {
4301 let ix = chunk_ix + i;
4302 if classifier.is_word(c) {
4303 if current_word_start_ix.is_none() {
4304 current_word_start_ix = Some(ix);
4305 }
4306
4307 if let Some(query_chars) = &query_chars {
4308 if query_ix < query_len {
4309 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4310 query_ix += 1;
4311 }
4312 }
4313 }
4314 continue;
4315 } else if let Some(word_start) = current_word_start_ix.take() {
4316 if query_ix == query_len {
4317 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4318 let mut word_text = self.text_for_range(word_start..ix).peekable();
4319 let first_char = word_text
4320 .peek()
4321 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip words that start with a digit.
4323 if !query.skip_digits
4324 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4325 {
4326 words.insert(word_text.collect(), word_range);
4327 }
4328 }
4329 }
4330 query_ix = 0;
4331 }
4332 chunk_ix += chunk.text.len();
4333 }
4334
4335 words
4336 }
4337}
4338
4339pub struct WordsQuery<'a> {
    /// Only returns words that contain all characters of this string, in order (matched case-insensitively).
4341 pub fuzzy_contents: Option<&'a str>,
4342 /// Skips words that start with a digit.
4343 pub skip_digits: bool,
    /// The buffer offset range within which to look for words.
4345 pub range: Range<usize>,
4346}
4347
4348fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4349 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4350}
4351
4352fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4353 let mut result = IndentSize::spaces(0);
4354 for c in text {
4355 let kind = match c {
4356 ' ' => IndentKind::Space,
4357 '\t' => IndentKind::Tab,
4358 _ => break,
4359 };
4360 if result.len == 0 {
4361 result.kind = kind;
4362 }
4363 result.len += 1;
4364 }
4365 result
4366}
4367
4368impl Clone for BufferSnapshot {
4369 fn clone(&self) -> Self {
4370 Self {
4371 text: self.text.clone(),
4372 syntax: self.syntax.clone(),
4373 file: self.file.clone(),
4374 remote_selections: self.remote_selections.clone(),
4375 diagnostics: self.diagnostics.clone(),
4376 language: self.language.clone(),
4377 non_text_state_update_count: self.non_text_state_update_count,
4378 }
4379 }
4380}
4381
4382impl Deref for BufferSnapshot {
4383 type Target = text::BufferSnapshot;
4384
4385 fn deref(&self) -> &Self::Target {
4386 &self.text
4387 }
4388}
4389
4390unsafe impl Send for BufferChunks<'_> {}
4391
4392impl<'a> BufferChunks<'a> {
4393 pub(crate) fn new(
4394 text: &'a Rope,
4395 range: Range<usize>,
4396 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4397 diagnostics: bool,
4398 buffer_snapshot: Option<&'a BufferSnapshot>,
4399 ) -> Self {
4400 let mut highlights = None;
4401 if let Some((captures, highlight_maps)) = syntax {
4402 highlights = Some(BufferChunkHighlights {
4403 captures,
4404 next_capture: None,
4405 stack: Default::default(),
4406 highlight_maps,
4407 })
4408 }
4409
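        // Diagnostic endpoints start out empty and are filled in from the buffer snapshot by `initialize_diagnostic_endpoints`.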
4410 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4411 let chunks = text.chunks_in_range(range.clone());
4412
4413 let mut this = BufferChunks {
4414 range,
4415 buffer_snapshot,
4416 chunks,
4417 diagnostic_endpoints,
4418 error_depth: 0,
4419 warning_depth: 0,
4420 information_depth: 0,
4421 hint_depth: 0,
4422 unnecessary_depth: 0,
4423 highlights,
4424 };
4425 this.initialize_diagnostic_endpoints();
4426 this
4427 }
4428
    /// Seeks to the given byte range in the buffer.
4430 pub fn seek(&mut self, range: Range<usize>) {
4431 let old_range = std::mem::replace(&mut self.range, range.clone());
4432 self.chunks.set_range(self.range.clone());
4433 if let Some(highlights) = self.highlights.as_mut() {
4434 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4435 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4436 highlights
4437 .stack
4438 .retain(|(end_offset, _)| *end_offset > range.start);
4439 if let Some(capture) = &highlights.next_capture {
4440 if range.start >= capture.node.start_byte() {
4441 let next_capture_end = capture.node.end_byte();
4442 if range.start < next_capture_end {
4443 highlights.stack.push((
4444 next_capture_end,
4445 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4446 ));
4447 }
4448 highlights.next_capture.take();
4449 }
4450 }
4451 } else if let Some(snapshot) = self.buffer_snapshot {
4452 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4453 *highlights = BufferChunkHighlights {
4454 captures,
4455 next_capture: None,
4456 stack: Default::default(),
4457 highlight_maps,
4458 };
4459 } else {
4460 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4461 // Seeking such BufferChunks is not supported.
4462 debug_assert!(
4463 false,
4464 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4465 );
4466 }
4467
4468 highlights.captures.set_byte_range(self.range.clone());
4469 self.initialize_diagnostic_endpoints();
4470 }
4471 }
4472
4473 fn initialize_diagnostic_endpoints(&mut self) {
4474 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4475 if let Some(buffer) = self.buffer_snapshot {
4476 let mut diagnostic_endpoints = Vec::new();
4477 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4478 diagnostic_endpoints.push(DiagnosticEndpoint {
4479 offset: entry.range.start,
4480 is_start: true,
4481 severity: entry.diagnostic.severity,
4482 is_unnecessary: entry.diagnostic.is_unnecessary,
4483 });
4484 diagnostic_endpoints.push(DiagnosticEndpoint {
4485 offset: entry.range.end,
4486 is_start: false,
4487 severity: entry.diagnostic.severity,
4488 is_unnecessary: entry.diagnostic.is_unnecessary,
4489 });
4490 }
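                // Sort by offset, with start endpoints before end endpoints at the same offset.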
4491 diagnostic_endpoints
4492 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4493 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4494 self.hint_depth = 0;
4495 self.error_depth = 0;
4496 self.warning_depth = 0;
4497 self.information_depth = 0;
4498 }
4499 }
4500 }
4501
4502 /// The current byte offset in the buffer.
4503 pub fn offset(&self) -> usize {
4504 self.range.start
4505 }
4506
4507 pub fn range(&self) -> Range<usize> {
4508 self.range.clone()
4509 }
4510
4511 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4512 let depth = match endpoint.severity {
4513 DiagnosticSeverity::ERROR => &mut self.error_depth,
4514 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4515 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4516 DiagnosticSeverity::HINT => &mut self.hint_depth,
4517 _ => return,
4518 };
4519 if endpoint.is_start {
4520 *depth += 1;
4521 } else {
4522 *depth -= 1;
4523 }
4524
4525 if endpoint.is_unnecessary {
4526 if endpoint.is_start {
4527 self.unnecessary_depth += 1;
4528 } else {
4529 self.unnecessary_depth -= 1;
4530 }
4531 }
4532 }
4533
4534 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4535 if self.error_depth > 0 {
4536 Some(DiagnosticSeverity::ERROR)
4537 } else if self.warning_depth > 0 {
4538 Some(DiagnosticSeverity::WARNING)
4539 } else if self.information_depth > 0 {
4540 Some(DiagnosticSeverity::INFORMATION)
4541 } else if self.hint_depth > 0 {
4542 Some(DiagnosticSeverity::HINT)
4543 } else {
4544 None
4545 }
4546 }
4547
4548 fn current_code_is_unnecessary(&self) -> bool {
4549 self.unnecessary_depth > 0
4550 }
4551}
4552
4553impl<'a> Iterator for BufferChunks<'a> {
4554 type Item = Chunk<'a>;
4555
4556 fn next(&mut self) -> Option<Self::Item> {
4557 let mut next_capture_start = usize::MAX;
4558 let mut next_diagnostic_endpoint = usize::MAX;
4559
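        // Advance highlight and diagnostic state up to the start of the remaining range, and record the next
        // positions where either changes.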
4560 if let Some(highlights) = self.highlights.as_mut() {
4561 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4562 if *parent_capture_end <= self.range.start {
4563 highlights.stack.pop();
4564 } else {
4565 break;
4566 }
4567 }
4568
4569 if highlights.next_capture.is_none() {
4570 highlights.next_capture = highlights.captures.next();
4571 }
4572
4573 while let Some(capture) = highlights.next_capture.as_ref() {
4574 if self.range.start < capture.node.start_byte() {
4575 next_capture_start = capture.node.start_byte();
4576 break;
4577 } else {
4578 let highlight_id =
4579 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4580 highlights
4581 .stack
4582 .push((capture.node.end_byte(), highlight_id));
4583 highlights.next_capture = highlights.captures.next();
4584 }
4585 }
4586 }
4587
4588 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4589 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4590 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4591 if endpoint.offset <= self.range.start {
4592 self.update_diagnostic_depths(endpoint);
4593 diagnostic_endpoints.next();
4594 } else {
4595 next_diagnostic_endpoint = endpoint.offset;
4596 break;
4597 }
4598 }
4599 }
4600 self.diagnostic_endpoints = diagnostic_endpoints;
4601
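        // Emit the next slice of the current chunk, ending at the nearest highlight or diagnostic boundary.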
4602 if let Some(chunk) = self.chunks.peek() {
4603 let chunk_start = self.range.start;
4604 let mut chunk_end = (self.chunks.offset() + chunk.len())
4605 .min(next_capture_start)
4606 .min(next_diagnostic_endpoint);
4607 let mut highlight_id = None;
4608 if let Some(highlights) = self.highlights.as_ref() {
4609 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4610 chunk_end = chunk_end.min(*parent_capture_end);
4611 highlight_id = Some(*parent_highlight_id);
4612 }
4613 }
4614
4615 let slice =
4616 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4617 self.range.start = chunk_end;
4618 if self.range.start == self.chunks.offset() + chunk.len() {
4619 self.chunks.next().unwrap();
4620 }
4621
4622 Some(Chunk {
4623 text: slice,
4624 syntax_highlight_id: highlight_id,
4625 diagnostic_severity: self.current_diagnostic_severity(),
4626 is_unnecessary: self.current_code_is_unnecessary(),
4627 ..Default::default()
4628 })
4629 } else {
4630 None
4631 }
4632 }
4633}
4634
4635impl operation_queue::Operation for Operation {
4636 fn lamport_timestamp(&self) -> clock::Lamport {
4637 match self {
4638 Operation::Buffer(_) => {
4639 unreachable!("buffer operations should never be deferred at this layer")
4640 }
4641 Operation::UpdateDiagnostics {
4642 lamport_timestamp, ..
4643 }
4644 | Operation::UpdateSelections {
4645 lamport_timestamp, ..
4646 }
4647 | Operation::UpdateCompletionTriggers {
4648 lamport_timestamp, ..
4649 } => *lamport_timestamp,
4650 }
4651 }
4652}
4653
4654impl Default for Diagnostic {
4655 fn default() -> Self {
4656 Self {
4657 source: Default::default(),
4658 code: None,
4659 code_description: None,
4660 severity: DiagnosticSeverity::ERROR,
4661 message: Default::default(),
4662 markdown: None,
4663 group_id: 0,
4664 is_primary: false,
4665 is_disk_based: false,
4666 is_unnecessary: false,
4667 data: None,
4668 }
4669 }
4670}
4671
4672impl IndentSize {
4673 /// Returns an [`IndentSize`] representing the given spaces.
4674 pub fn spaces(len: u32) -> Self {
4675 Self {
4676 len,
4677 kind: IndentKind::Space,
4678 }
4679 }
4680
4681 /// Returns an [`IndentSize`] representing a tab.
4682 pub fn tab() -> Self {
4683 Self {
4684 len: 1,
4685 kind: IndentKind::Tab,
4686 }
4687 }
4688
4689 /// An iterator over the characters represented by this [`IndentSize`].
4690 pub fn chars(&self) -> impl Iterator<Item = char> {
4691 iter::repeat(self.char()).take(self.len as usize)
4692 }
4693
4694 /// The character representation of this [`IndentSize`].
4695 pub fn char(&self) -> char {
4696 match self.kind {
4697 IndentKind::Space => ' ',
4698 IndentKind::Tab => '\t',
4699 }
4700 }
4701
4702 /// Consumes the current [`IndentSize`] and returns a new one that has
4703 /// been shrunk or enlarged by the given size along the given direction.
4704 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4705 match direction {
4706 Ordering::Less => {
4707 if self.kind == size.kind && self.len >= size.len {
4708 self.len -= size.len;
4709 }
4710 }
4711 Ordering::Equal => {}
4712 Ordering::Greater => {
4713 if self.len == 0 {
4714 self = size;
4715 } else if self.kind == size.kind {
4716 self.len += size.len;
4717 }
4718 }
4719 }
4720 self
4721 }
4722
4723 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4724 match self.kind {
4725 IndentKind::Space => self.len as usize,
4726 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4727 }
4728 }
4729}
4730
4731#[cfg(any(test, feature = "test-support"))]
4732pub struct TestFile {
4733 pub path: Arc<Path>,
4734 pub root_name: String,
4735 pub local_root: Option<PathBuf>,
4736}
4737
4738#[cfg(any(test, feature = "test-support"))]
4739impl File for TestFile {
4740 fn path(&self) -> &Arc<Path> {
4741 &self.path
4742 }
4743
4744 fn full_path(&self, _: &gpui::App) -> PathBuf {
4745 PathBuf::from(&self.root_name).join(self.path.as_ref())
4746 }
4747
4748 fn as_local(&self) -> Option<&dyn LocalFile> {
4749 if self.local_root.is_some() {
4750 Some(self)
4751 } else {
4752 None
4753 }
4754 }
4755
4756 fn disk_state(&self) -> DiskState {
4757 unimplemented!()
4758 }
4759
4760 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4761 self.path().file_name().unwrap_or(self.root_name.as_ref())
4762 }
4763
4764 fn worktree_id(&self, _: &App) -> WorktreeId {
4765 WorktreeId::from_usize(0)
4766 }
4767
4768 fn to_proto(&self, _: &App) -> rpc::proto::File {
4769 unimplemented!()
4770 }
4771
4772 fn is_private(&self) -> bool {
4773 false
4774 }
4775}
4776
4777#[cfg(any(test, feature = "test-support"))]
4778impl LocalFile for TestFile {
4779 fn abs_path(&self, _cx: &App) -> PathBuf {
4780 PathBuf::from(self.local_root.as_ref().unwrap())
4781 .join(&self.root_name)
4782 .join(self.path.as_ref())
4783 }
4784
4785 fn load(&self, _cx: &App) -> Task<Result<String>> {
4786 unimplemented!()
4787 }
4788
4789 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4790 unimplemented!()
4791 }
4792}
4793
4794pub(crate) fn contiguous_ranges(
4795 values: impl Iterator<Item = u32>,
4796 max_len: usize,
4797) -> impl Iterator<Item = Range<u32>> {
4798 let mut values = values;
4799 let mut current_range: Option<Range<u32>> = None;
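    // Group consecutive values into ranges, starting a new range when a value is not contiguous or the current range has reached max_len.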
4800 std::iter::from_fn(move || {
4801 loop {
4802 if let Some(value) = values.next() {
4803 if let Some(range) = &mut current_range {
4804 if value == range.end && range.len() < max_len {
4805 range.end += 1;
4806 continue;
4807 }
4808 }
4809
4810 let prev_range = current_range.clone();
4811 current_range = Some(value..(value + 1));
4812 if prev_range.is_some() {
4813 return prev_range;
4814 }
4815 } else {
4816 return current_range.take();
4817 }
4818 }
4819 })
4820}
4821
4822#[derive(Default, Debug)]
4823pub struct CharClassifier {
4824 scope: Option<LanguageScope>,
4825 for_completion: bool,
4826 ignore_punctuation: bool,
4827}
4828
4829impl CharClassifier {
4830 pub fn new(scope: Option<LanguageScope>) -> Self {
4831 Self {
4832 scope,
4833 for_completion: false,
4834 ignore_punctuation: false,
4835 }
4836 }
4837
4838 pub fn for_completion(self, for_completion: bool) -> Self {
4839 Self {
4840 for_completion,
4841 ..self
4842 }
4843 }
4844
4845 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4846 Self {
4847 ignore_punctuation,
4848 ..self
4849 }
4850 }
4851
4852 pub fn is_whitespace(&self, c: char) -> bool {
4853 self.kind(c) == CharKind::Whitespace
4854 }
4855
4856 pub fn is_word(&self, c: char) -> bool {
4857 self.kind(c) == CharKind::Word
4858 }
4859
4860 pub fn is_punctuation(&self, c: char) -> bool {
4861 self.kind(c) == CharKind::Punctuation
4862 }
4863
4864 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4865 if c.is_alphanumeric() || c == '_' {
4866 return CharKind::Word;
4867 }
4868
4869 if let Some(scope) = &self.scope {
4870 let characters = if self.for_completion {
4871 scope.completion_query_characters()
4872 } else {
4873 scope.word_characters()
4874 };
4875 if let Some(characters) = characters {
4876 if characters.contains(&c) {
4877 return CharKind::Word;
4878 }
4879 }
4880 }
4881
4882 if c.is_whitespace() {
4883 return CharKind::Whitespace;
4884 }
4885
4886 if ignore_punctuation {
4887 CharKind::Word
4888 } else {
4889 CharKind::Punctuation
4890 }
4891 }
4892
4893 pub fn kind(&self, c: char) -> CharKind {
4894 self.kind_with(c, self.ignore_punctuation)
4895 }
4896}
4897
4898/// Find all of the ranges of whitespace that occur at the ends of lines
4899/// in the given rope.
4900///
4901/// This could also be done with a regex search, but this implementation
4902/// avoids copying text.
4903pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4904 let mut ranges = Vec::new();
4905
4906 let mut offset = 0;
4907 let mut prev_chunk_trailing_whitespace_range = 0..0;
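    // Each line's trailing-whitespace range is pushed one iteration late so that whitespace continuing across a chunk boundary can be merged into a single range.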
4908 for chunk in rope.chunks() {
4909 let mut prev_line_trailing_whitespace_range = 0..0;
4910 for (i, line) in chunk.split('\n').enumerate() {
4911 let line_end_offset = offset + line.len();
4912 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4913 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4914
4915 if i == 0 && trimmed_line_len == 0 {
4916 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4917 }
4918 if !prev_line_trailing_whitespace_range.is_empty() {
4919 ranges.push(prev_line_trailing_whitespace_range);
4920 }
4921
4922 offset = line_end_offset + 1;
4923 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4924 }
4925
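        // The last segment of a chunk has no trailing newline, so undo the extra increment applied inside the loop.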
4926 offset -= 1;
4927 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4928 }
4929
4930 if !prev_chunk_trailing_whitespace_range.is_empty() {
4931 ranges.push(prev_chunk_trailing_whitespace_range);
4932 }
4933
4934 ranges
4935}