1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] can be edited.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell holds `(self.version, has_changes)` as of the most recent call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed
    /// back to the server when we request code actions for this diagnostic.
233 pub data: Option<Value>,
234}
235
236/// An operation used to synchronize this buffer with its other replicas.
237#[derive(Clone, Debug, PartialEq)]
238pub enum Operation {
239 /// A text operation.
240 Buffer(text::Operation),
241
242 /// An update to the buffer's diagnostics.
243 UpdateDiagnostics {
244 /// The id of the language server that produced the new diagnostics.
245 server_id: LanguageServerId,
246 /// The diagnostics.
247 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
248 /// The buffer's lamport timestamp.
249 lamport_timestamp: clock::Lamport,
250 },
251
252 /// An update to the most recent selections in this buffer.
253 UpdateSelections {
254 /// The selections.
255 selections: Arc<[Selection<Anchor>]>,
256 /// The buffer's lamport timestamp.
257 lamport_timestamp: clock::Lamport,
258 /// Whether the selections are in 'line mode'.
259 line_mode: bool,
260 /// The [`CursorShape`] associated with these selections.
261 cursor_shape: CursorShape,
262 },
263
264 /// An update to the characters that should trigger autocompletion
265 /// for this buffer.
266 UpdateCompletionTriggers {
267 /// The characters that trigger autocompletion.
268 triggers: Vec<String>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// The language server ID.
272 server_id: LanguageServerId,
273 },
274}
275
276/// An event that occurs in a buffer.
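///
/// A hedged sketch of observing these events to relay operations between
/// replicas. The subscription callback shape, `.detach()`, and the
/// `send_to_collaborators` transport are assumptions for illustration; only
/// `apply_ops` and `proto::serialize_operation` appear in this file.
///
/// ```ignore
/// cx.subscribe(&buffer, |_, _, event: &BufferEvent, _cx| {
///     if let BufferEvent::Operation { operation, is_local: true } = event {
///         // Hypothetical transport; remote replicas would apply the
///         // deserialized operation with `Buffer::apply_ops`.
///         send_to_collaborators(proto::serialize_operation(operation));
///     }
/// })
/// .detach();
/// ```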
277#[derive(Clone, Debug, PartialEq)]
278pub enum BufferEvent {
279 /// The buffer was changed in a way that must be
280 /// propagated to its other replicas.
281 Operation {
282 operation: Operation,
283 is_local: bool,
284 },
285 /// The buffer was edited.
286 Edited,
287 /// The buffer's `dirty` bit changed.
288 DirtyChanged,
289 /// The buffer was saved.
290 Saved,
291 /// The buffer's file was changed on disk.
292 FileHandleChanged,
293 /// The buffer was reloaded.
294 Reloaded,
    /// The buffer needs to be reloaded.
296 ReloadNeeded,
297 /// The buffer's language was changed.
298 LanguageChanged,
299 /// The buffer's syntax trees were updated.
300 Reparsed,
301 /// The buffer's diagnostics were updated.
302 DiagnosticsUpdated,
303 /// The buffer gained or lost editing capabilities.
304 CapabilityChanged,
305 /// The buffer was explicitly requested to close.
306 Closed,
307 /// The buffer was discarded when closing.
308 Discarded,
309}
310
311/// The file associated with a buffer.
312pub trait File: Send + Sync + Any {
313 /// Returns the [`LocalFile`] associated with this file, if the
314 /// file is local.
315 fn as_local(&self) -> Option<&dyn LocalFile>;
316
317 /// Returns whether this file is local.
318 fn is_local(&self) -> bool {
319 self.as_local().is_some()
320 }
321
322 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
323 /// only available in some states, such as modification time.
324 fn disk_state(&self) -> DiskState;
325
326 /// Returns the path of this file relative to the worktree's root directory.
327 fn path(&self) -> &Arc<Path>;
328
329 /// Returns the path of this file relative to the worktree's parent directory (this means it
330 /// includes the name of the worktree's root folder).
331 fn full_path(&self, cx: &App) -> PathBuf;
332
333 /// Returns the last component of this handle's absolute path. If this handle refers to the root
334 /// of its worktree, then this method will return the name of the worktree itself.
335 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
336
337 /// Returns the id of the worktree to which this file belongs.
338 ///
339 /// This is needed for looking up project-specific settings.
340 fn worktree_id(&self, cx: &App) -> WorktreeId;
341
342 /// Converts this file into a protobuf message.
343 fn to_proto(&self, cx: &App) -> rpc::proto::File;
344
    /// Returns whether Zed considers this to be a private file.
346 fn is_private(&self) -> bool;
347}
348
/// The file's storage status: whether it is stored on disk (`Present`) and, if so,
/// when it was last modified. A file that is not stored is either `New` or `Deleted`.
/// The UI distinguishes these two states; for example, the buffer tab does not
/// display a deletion indicator for new files.
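///
/// A small illustration of branching on this state; the surrounding `file`
/// variable is hypothetical:
///
/// ```ignore
/// match file.disk_state() {
///     DiskState::New => { /* never saved; show no deletion indicator */ }
///     DiskState::Present { mtime } => { /* compare `mtime` with the saved mtime */ }
///     DiskState::Deleted => { /* show a deletion indicator */ }
/// }
/// ```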
353#[derive(Copy, Clone, Debug, PartialEq)]
354pub enum DiskState {
355 /// File created in Zed that has not been saved.
356 New,
357 /// File present on the filesystem.
358 Present { mtime: MTime },
359 /// Deleted file that was previously present.
360 Deleted,
361}
362
363impl DiskState {
364 /// Returns the file's last known modification time on disk.
365 pub fn mtime(self) -> Option<MTime> {
366 match self {
367 DiskState::New => None,
368 DiskState::Present { mtime } => Some(mtime),
369 DiskState::Deleted => None,
370 }
371 }
372
373 pub fn exists(&self) -> bool {
374 match self {
375 DiskState::New => false,
376 DiskState::Present { .. } => true,
377 DiskState::Deleted => false,
378 }
379 }
380}
381
382/// The file associated with a buffer, in the case where the file is on the local disk.
383pub trait LocalFile: File {
    /// Returns the absolute path of this file.
385 fn abs_path(&self, cx: &App) -> PathBuf;
386
387 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
388 fn load(&self, cx: &App) -> Task<Result<String>>;
389
390 /// Loads the file's contents from disk.
391 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
392}
393
394/// The auto-indent behavior associated with an editing operation.
395/// For some editing operations, each affected line of text has its
396/// indentation recomputed. For other operations, the entire block
397/// of edited text is adjusted uniformly.
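///
/// A hedged sketch of choosing a mode when editing. It mirrors the
/// `edit(edits, None, cx)` calls elsewhere in this file; `buffer`, `range`,
/// and `text` are placeholders:
///
/// ```ignore
/// // Re-indent each inserted line independently.
/// buffer.edit([(range.clone(), text.clone())], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block uniformly, preserving its internal indentation.
/// buffer.edit(
///     [(range, text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```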
398#[derive(Clone, Debug)]
399pub enum AutoindentMode {
400 /// Indent each line of inserted text.
401 EachLine,
402 /// Apply the same indentation adjustment to all of the lines
403 /// in a given insertion.
404 Block {
405 /// The original indentation column of the first line of each
406 /// insertion, if it has been copied.
407 ///
408 /// Knowing this makes it possible to preserve the relative indentation
409 /// of every line in the insertion from when it was copied.
410 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is then auto-indented to column `b`, then every other line
        /// of the insertion will have its indentation adjusted by the same
        /// delta, `b - a`.
414 original_indent_columns: Vec<Option<u32>>,
415 },
416}
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423 ignore_empty_lines: bool,
424}
425
426#[derive(Debug, Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
479 /// The severity of diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485}
486
487/// A set of edits to a given version of a buffer, computed asynchronously.
488#[derive(Debug)]
489pub struct Diff {
490 pub base_version: clock::Global,
491 pub line_ending: LineEnding,
492 pub edits: Vec<(Range<usize>, Arc<str>)>,
493}
494
495#[derive(Clone, Copy)]
496pub(crate) struct DiagnosticEndpoint {
497 offset: usize,
498 is_start: bool,
499 severity: DiagnosticSeverity,
500 is_unnecessary: bool,
501}
502
503/// A class of characters, used for characterizing a run of text.
504#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
505pub enum CharKind {
506 /// Whitespace.
507 Whitespace,
508 /// Punctuation.
509 Punctuation,
510 /// Word.
511 Word,
512}
513
/// A runnable is a set of data about a region that could be resolved into a task.
515pub struct Runnable {
516 pub tags: SmallVec<[RunnableTag; 1]>,
517 pub language: Arc<Language>,
518 pub buffer: BufferId,
519}
520
521#[derive(Default, Clone, Debug)]
522pub struct HighlightedText {
523 pub text: SharedString,
524 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
525}
526
527#[derive(Default, Debug)]
528struct HighlightedTextBuilder {
529 pub text: String,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533impl HighlightedText {
534 pub fn from_buffer_range<T: ToOffset>(
535 range: Range<T>,
536 snapshot: &text::BufferSnapshot,
537 syntax_snapshot: &SyntaxSnapshot,
538 override_style: Option<HighlightStyle>,
539 syntax_theme: &SyntaxTheme,
540 ) -> Self {
541 let mut highlighted_text = HighlightedTextBuilder::default();
542 highlighted_text.add_text_from_buffer_range(
543 range,
544 snapshot,
545 syntax_snapshot,
546 override_style,
547 syntax_theme,
548 );
549 highlighted_text.build()
550 }
551
552 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
553 gpui::StyledText::new(self.text.clone())
554 .with_default_highlights(default_style, self.highlights.iter().cloned())
555 }
556
    /// Returns the first line, with leading whitespace trimmed unless a
    /// highlight starts within it, and a boolean indicating whether more
    /// lines follow.
559 pub fn first_line_preview(self) -> (Self, bool) {
560 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
561 let first_line = &self.text[..newline_ix];
562
563 // Trim leading whitespace, unless an edit starts prior to it.
564 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
565 if let Some((first_highlight_range, _)) = self.highlights.first() {
566 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
567 }
568
569 let preview_text = &first_line[preview_start_ix..];
570 let preview_highlights = self
571 .highlights
572 .into_iter()
573 .take_while(|(range, _)| range.start < newline_ix)
574 .filter_map(|(mut range, highlight)| {
575 range.start = range.start.saturating_sub(preview_start_ix);
576 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
577 if range.is_empty() {
578 None
579 } else {
580 Some((range, highlight))
581 }
582 });
583
584 let preview = Self {
585 text: SharedString::new(preview_text),
586 highlights: preview_highlights.collect(),
587 };
588
589 (preview, self.text.len() > newline_ix)
590 }
591}
592
593impl HighlightedTextBuilder {
594 pub fn build(self) -> HighlightedText {
595 HighlightedText {
596 text: self.text.into(),
597 highlights: self.highlights,
598 }
599 }
600
601 pub fn add_text_from_buffer_range<T: ToOffset>(
602 &mut self,
603 range: Range<T>,
604 snapshot: &text::BufferSnapshot,
605 syntax_snapshot: &SyntaxSnapshot,
606 override_style: Option<HighlightStyle>,
607 syntax_theme: &SyntaxTheme,
608 ) {
609 let range = range.to_offset(snapshot);
610 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
611 let start = self.text.len();
612 self.text.push_str(chunk.text);
613 let end = self.text.len();
614
615 if let Some(mut highlight_style) = chunk
616 .syntax_highlight_id
617 .and_then(|id| id.style(syntax_theme))
618 {
619 if let Some(override_style) = override_style {
620 highlight_style.highlight(override_style);
621 }
622 self.highlights.push((start..end, highlight_style));
623 } else if let Some(override_style) = override_style {
624 self.highlights.push((start..end, override_style));
625 }
626 }
627 }
628
629 fn highlighted_chunks<'a>(
630 range: Range<usize>,
631 snapshot: &'a text::BufferSnapshot,
632 syntax_snapshot: &'a SyntaxSnapshot,
633 ) -> BufferChunks<'a> {
634 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
635 grammar.highlights_query.as_ref()
636 });
637
638 let highlight_maps = captures
639 .grammars()
640 .iter()
641 .map(|grammar| grammar.highlight_map())
642 .collect();
643
644 BufferChunks::new(
645 snapshot.as_rope(),
646 range,
647 Some((captures, highlight_maps)),
648 false,
649 None,
650 )
651 }
652}
653
654#[derive(Clone)]
655pub struct EditPreview {
656 old_snapshot: text::BufferSnapshot,
657 applied_edits_snapshot: text::BufferSnapshot,
658 syntax_snapshot: SyntaxSnapshot,
659}
660
661impl EditPreview {
662 pub fn highlight_edits(
663 &self,
664 current_snapshot: &BufferSnapshot,
665 edits: &[(Range<Anchor>, String)],
666 include_deletions: bool,
667 cx: &App,
668 ) -> HighlightedText {
669 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
670 return HighlightedText::default();
671 };
672
673 let mut highlighted_text = HighlightedTextBuilder::default();
674
675 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
676
677 let insertion_highlight_style = HighlightStyle {
678 background_color: Some(cx.theme().status().created_background),
679 ..Default::default()
680 };
681 let deletion_highlight_style = HighlightStyle {
682 background_color: Some(cx.theme().status().deleted_background),
683 ..Default::default()
684 };
685 let syntax_theme = cx.theme().syntax();
686
687 for (range, edit_text) in edits {
688 let edit_new_end_in_preview_snapshot = range
689 .end
690 .bias_right(&self.old_snapshot)
691 .to_offset(&self.applied_edits_snapshot);
692 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
693
694 let unchanged_range_in_preview_snapshot =
695 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
696 if !unchanged_range_in_preview_snapshot.is_empty() {
697 highlighted_text.add_text_from_buffer_range(
698 unchanged_range_in_preview_snapshot,
699 &self.applied_edits_snapshot,
700 &self.syntax_snapshot,
701 None,
702 &syntax_theme,
703 );
704 }
705
706 let range_in_current_snapshot = range.to_offset(current_snapshot);
707 if include_deletions && !range_in_current_snapshot.is_empty() {
708 highlighted_text.add_text_from_buffer_range(
709 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
712 Some(deletion_highlight_style),
713 &syntax_theme,
714 );
715 }
716
717 if !edit_text.is_empty() {
718 highlighted_text.add_text_from_buffer_range(
719 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
720 &self.applied_edits_snapshot,
721 &self.syntax_snapshot,
722 Some(insertion_highlight_style),
723 &syntax_theme,
724 );
725 }
726
727 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
728 }
729
730 highlighted_text.add_text_from_buffer_range(
731 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
732 &self.applied_edits_snapshot,
733 &self.syntax_snapshot,
734 None,
735 &syntax_theme,
736 );
737
738 highlighted_text.build()
739 }
740
741 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
742 let (first, _) = edits.first()?;
743 let (last, _) = edits.last()?;
744
745 let start = first
746 .start
747 .bias_left(&self.old_snapshot)
748 .to_point(&self.applied_edits_snapshot);
749 let end = last
750 .end
751 .bias_right(&self.old_snapshot)
752 .to_point(&self.applied_edits_snapshot);
753
754 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
755 let range = Point::new(start.row, 0)
756 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
757
758 Some(range.to_offset(&self.applied_edits_snapshot))
759 }
760}
761
762#[derive(Clone, Debug, PartialEq, Eq)]
763pub struct BracketMatch {
764 pub open_range: Range<usize>,
765 pub close_range: Range<usize>,
766 pub newline_only: bool,
767}
768
769impl Buffer {
770 /// Create a new buffer with the given base text.
771 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
772 Self::build(
773 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
774 None,
775 Capability::ReadWrite,
776 )
777 }
778
779 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
780 pub fn local_normalized(
781 base_text_normalized: Rope,
782 line_ending: LineEnding,
783 cx: &Context<Self>,
784 ) -> Self {
785 Self::build(
786 TextBuffer::new_normalized(
787 0,
788 cx.entity_id().as_non_zero_u64().into(),
789 line_ending,
790 base_text_normalized,
791 ),
792 None,
793 Capability::ReadWrite,
794 )
795 }
796
797 /// Create a new buffer that is a replica of a remote buffer.
798 pub fn remote(
799 remote_id: BufferId,
800 replica_id: ReplicaId,
801 capability: Capability,
802 base_text: impl Into<String>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new(replica_id, remote_id, base_text.into()),
806 None,
807 capability,
808 )
809 }
810
811 /// Create a new buffer that is a replica of a remote buffer, populating its
812 /// state from the given protobuf message.
813 pub fn from_proto(
814 replica_id: ReplicaId,
815 capability: Capability,
816 message: proto::BufferState,
817 file: Option<Arc<dyn File>>,
818 ) -> Result<Self> {
819 let buffer_id = BufferId::new(message.id)
820 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
821 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
822 let mut this = Self::build(buffer, file, capability);
823 this.text.set_line_ending(proto::deserialize_line_ending(
824 rpc::proto::LineEnding::from_i32(message.line_ending)
825 .ok_or_else(|| anyhow!("missing line_ending"))?,
826 ));
827 this.saved_version = proto::deserialize_version(&message.saved_version);
828 this.saved_mtime = message.saved_mtime.map(|time| time.into());
829 Ok(this)
830 }
831
832 /// Serialize the buffer's state to a protobuf message.
833 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
834 proto::BufferState {
835 id: self.remote_id().into(),
836 file: self.file.as_ref().map(|f| f.to_proto(cx)),
837 base_text: self.base_text().to_string(),
838 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
839 saved_version: proto::serialize_version(&self.saved_version),
840 saved_mtime: self.saved_mtime.map(|time| time.into()),
841 }
842 }
843
844 /// Serialize as protobufs all of the changes to the buffer since the given version.
845 pub fn serialize_ops(
846 &self,
847 since: Option<clock::Global>,
848 cx: &App,
849 ) -> Task<Vec<proto::Operation>> {
850 let mut operations = Vec::new();
851 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
852
853 operations.extend(self.remote_selections.iter().map(|(_, set)| {
854 proto::serialize_operation(&Operation::UpdateSelections {
855 selections: set.selections.clone(),
856 lamport_timestamp: set.lamport_timestamp,
857 line_mode: set.line_mode,
858 cursor_shape: set.cursor_shape,
859 })
860 }));
861
862 for (server_id, diagnostics) in &self.diagnostics {
863 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
864 lamport_timestamp: self.diagnostics_timestamp,
865 server_id: *server_id,
866 diagnostics: diagnostics.iter().cloned().collect(),
867 }));
868 }
869
870 for (server_id, completions) in &self.completion_triggers_per_language_server {
871 operations.push(proto::serialize_operation(
872 &Operation::UpdateCompletionTriggers {
873 triggers: completions.iter().cloned().collect(),
874 lamport_timestamp: self.completion_triggers_timestamp,
875 server_id: *server_id,
876 },
877 ));
878 }
879
880 let text_operations = self.text.operations().clone();
881 cx.background_spawn(async move {
882 let since = since.unwrap_or_default();
883 operations.extend(
884 text_operations
885 .iter()
886 .filter(|(_, op)| !since.observed(op.timestamp()))
887 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
888 );
889 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
890 operations
891 })
892 }
893
894 /// Assign a language to the buffer, returning the buffer.
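    ///
    /// A minimal sketch, assuming an `Arc<Language>` named `language` is
    /// already available:
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| {
    ///     Buffer::local("fn main() {}", cx).with_language(language.clone(), cx)
    /// });
    /// ```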
895 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
896 self.set_language(Some(language), cx);
897 self
898 }
899
900 /// Returns the [`Capability`] of this buffer.
901 pub fn capability(&self) -> Capability {
902 self.capability
903 }
904
905 /// Whether this buffer can only be read.
906 pub fn read_only(&self) -> bool {
907 self.capability == Capability::ReadOnly
908 }
909
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
911 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
912 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
913 let snapshot = buffer.snapshot();
914 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
915 Self {
916 saved_mtime,
917 saved_version: buffer.version(),
918 preview_version: buffer.version(),
919 reload_task: None,
920 transaction_depth: 0,
921 was_dirty_before_starting_transaction: None,
922 has_unsaved_edits: Cell::new((buffer.version(), false)),
923 text: buffer,
924 branch_state: None,
925 file,
926 capability,
927 syntax_map,
928 reparse: None,
929 non_text_state_update_count: 0,
930 sync_parse_timeout: Duration::from_millis(1),
931 parse_status: async_watch::channel(ParseStatus::Idle),
932 autoindent_requests: Default::default(),
933 pending_autoindent: Default::default(),
934 language: None,
935 remote_selections: Default::default(),
936 diagnostics: Default::default(),
937 diagnostics_timestamp: Default::default(),
938 completion_triggers: Default::default(),
939 completion_triggers_per_language_server: Default::default(),
940 completion_triggers_timestamp: Default::default(),
941 deferred_ops: OperationQueue::new(),
942 has_conflict: false,
943 change_bits: Default::default(),
944 _subscriptions: Vec::new(),
945 }
946 }
947
948 pub fn build_snapshot(
949 text: Rope,
950 language: Option<Arc<Language>>,
951 language_registry: Option<Arc<LanguageRegistry>>,
952 cx: &mut App,
953 ) -> impl Future<Output = BufferSnapshot> + use<> {
954 let entity_id = cx.reserve_entity::<Self>().entity_id();
955 let buffer_id = entity_id.as_non_zero_u64().into();
956 async move {
957 let text =
958 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
959 let mut syntax = SyntaxMap::new(&text).snapshot();
960 if let Some(language) = language.clone() {
961 let text = text.clone();
962 let language = language.clone();
963 let language_registry = language_registry.clone();
964 syntax.reparse(&text, language_registry, language);
965 }
966 BufferSnapshot {
967 text,
968 syntax,
969 file: None,
970 diagnostics: Default::default(),
971 remote_selections: Default::default(),
972 language,
973 non_text_state_update_count: 0,
974 }
975 }
976 }
977
978 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
979 let entity_id = cx.reserve_entity::<Self>().entity_id();
980 let buffer_id = entity_id.as_non_zero_u64().into();
981 let text =
982 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
983 let syntax = SyntaxMap::new(&text).snapshot();
984 BufferSnapshot {
985 text,
986 syntax,
987 file: None,
988 diagnostics: Default::default(),
989 remote_selections: Default::default(),
990 language: None,
991 non_text_state_update_count: 0,
992 }
993 }
994
995 #[cfg(any(test, feature = "test-support"))]
996 pub fn build_snapshot_sync(
997 text: Rope,
998 language: Option<Arc<Language>>,
999 language_registry: Option<Arc<LanguageRegistry>>,
1000 cx: &mut App,
1001 ) -> BufferSnapshot {
1002 let entity_id = cx.reserve_entity::<Self>().entity_id();
1003 let buffer_id = entity_id.as_non_zero_u64().into();
1004 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1005 let mut syntax = SyntaxMap::new(&text).snapshot();
1006 if let Some(language) = language.clone() {
1007 let text = text.clone();
1008 let language = language.clone();
1009 let language_registry = language_registry.clone();
1010 syntax.reparse(&text, language_registry, language);
1011 }
1012 BufferSnapshot {
1013 text,
1014 syntax,
1015 file: None,
1016 diagnostics: Default::default(),
1017 remote_selections: Default::default(),
1018 language,
1019 non_text_state_update_count: 0,
1020 }
1021 }
1022
1023 /// Retrieve a snapshot of the buffer's current state. This is computationally
1024 /// cheap, and allows reading from the buffer on a background thread.
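    ///
    /// A minimal sketch of reading from a snapshot off the main thread;
    /// `buffer`, `cx`, and the `detach` call are assumed, while
    /// `background_spawn` mirrors other uses in this file:
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // `snapshot.text` is a `text::BufferSnapshot` that can be read
    ///     // here without blocking the main thread.
    ///     let first_line_len = snapshot.text.line_len(0);
    /// })
    /// .detach();
    /// ```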
1025 pub fn snapshot(&self) -> BufferSnapshot {
1026 let text = self.text.snapshot();
1027 let mut syntax_map = self.syntax_map.lock();
1028 syntax_map.interpolate(&text);
1029 let syntax = syntax_map.snapshot();
1030
1031 BufferSnapshot {
1032 text,
1033 syntax,
1034 file: self.file.clone(),
1035 remote_selections: self.remote_selections.clone(),
1036 diagnostics: self.diagnostics.clone(),
1037 language: self.language.clone(),
1038 non_text_state_update_count: self.non_text_state_update_count,
1039 }
1040 }
1041
1042 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1043 let this = cx.entity();
1044 cx.new(|cx| {
1045 let mut branch = Self {
1046 branch_state: Some(BufferBranchState {
1047 base_buffer: this.clone(),
1048 merged_operations: Default::default(),
1049 }),
1050 language: self.language.clone(),
1051 has_conflict: self.has_conflict,
1052 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1053 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1054 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1055 };
1056 if let Some(language_registry) = self.language_registry() {
1057 branch.set_language_registry(language_registry);
1058 }
1059
1060 // Reparse the branch buffer so that we get syntax highlighting immediately.
1061 branch.reparse(cx);
1062
1063 branch
1064 })
1065 }
1066
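    /// Computes, in the background, an [`EditPreview`] for a prospective set
    /// of edits without applying them to this buffer.
    ///
    /// A hedged sketch of the two-step flow; the `edits` value and surrounding
    /// async context are placeholders:
    ///
    /// ```ignore
    /// let preview = buffer.read(cx).preview_edits(edits.clone(), cx).await;
    /// let highlighted =
    ///     preview.highlight_edits(&buffer.read(cx).snapshot(), &edits, true, cx);
    /// ```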
1067 pub fn preview_edits(
1068 &self,
1069 edits: Arc<[(Range<Anchor>, String)]>,
1070 cx: &App,
1071 ) -> Task<EditPreview> {
1072 let registry = self.language_registry();
1073 let language = self.language().cloned();
1074 let old_snapshot = self.text.snapshot();
1075 let mut branch_buffer = self.text.branch();
1076 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1077 cx.background_spawn(async move {
1078 if !edits.is_empty() {
1079 if let Some(language) = language.clone() {
1080 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1081 }
1082
1083 branch_buffer.edit(edits.iter().cloned());
1084 let snapshot = branch_buffer.snapshot();
1085 syntax_snapshot.interpolate(&snapshot);
1086
1087 if let Some(language) = language {
1088 syntax_snapshot.reparse(&snapshot, registry, language);
1089 }
1090 }
1091 EditPreview {
1092 old_snapshot,
1093 applied_edits_snapshot: branch_buffer.snapshot(),
1094 syntax_snapshot,
1095 }
1096 })
1097 }
1098
1099 /// Applies all of the changes in this buffer that intersect any of the
1100 /// given `ranges` to its base buffer.
1101 ///
1102 /// If `ranges` is empty, then all changes will be applied. This buffer must
1103 /// be a branch buffer to call this method.
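    ///
    /// A minimal sketch (illustrative only; `buffer` and `cx` are assumed):
    /// edit a branch, then fold every branch edit back into the base buffer by
    /// passing an empty range list.
    ///
    /// ```ignore
    /// let branch = buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// new text\n")], None, cx);
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```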
1104 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1105 let Some(base_buffer) = self.base_buffer() else {
1106 debug_panic!("not a branch buffer");
1107 return;
1108 };
1109
1110 let mut ranges = if ranges.is_empty() {
1111 &[0..usize::MAX]
1112 } else {
1113 ranges.as_slice()
1114 }
1115 .into_iter()
1116 .peekable();
1117
1118 let mut edits = Vec::new();
1119 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1120 let mut is_included = false;
1121 while let Some(range) = ranges.peek() {
1122 if range.end < edit.new.start {
1123 ranges.next().unwrap();
1124 } else {
1125 if range.start <= edit.new.end {
1126 is_included = true;
1127 }
1128 break;
1129 }
1130 }
1131
1132 if is_included {
1133 edits.push((
1134 edit.old.clone(),
1135 self.text_for_range(edit.new.clone()).collect::<String>(),
1136 ));
1137 }
1138 }
1139
1140 let operation = base_buffer.update(cx, |base_buffer, cx| {
1141 // cx.emit(BufferEvent::DiffBaseChanged);
1142 base_buffer.edit(edits, None, cx)
1143 });
1144
1145 if let Some(operation) = operation {
1146 if let Some(BufferBranchState {
1147 merged_operations, ..
1148 }) = &mut self.branch_state
1149 {
1150 merged_operations.push(operation);
1151 }
1152 }
1153 }
1154
1155 fn on_base_buffer_event(
1156 &mut self,
1157 _: Entity<Buffer>,
1158 event: &BufferEvent,
1159 cx: &mut Context<Self>,
1160 ) {
1161 let BufferEvent::Operation { operation, .. } = event else {
1162 return;
1163 };
1164 let Some(BufferBranchState {
1165 merged_operations, ..
1166 }) = &mut self.branch_state
1167 else {
1168 return;
1169 };
1170
1171 let mut operation_to_undo = None;
1172 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1173 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1174 merged_operations.remove(ix);
1175 operation_to_undo = Some(operation.timestamp);
1176 }
1177 }
1178
1179 self.apply_ops([operation.clone()], cx);
1180
1181 if let Some(timestamp) = operation_to_undo {
1182 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1183 self.undo_operations(counts, cx);
1184 }
1185 }
1186
1187 #[cfg(test)]
1188 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1189 &self.text
1190 }
1191
1192 /// Retrieve a snapshot of the buffer's raw text, without any
1193 /// language-related state like the syntax tree or diagnostics.
1194 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1195 self.text.snapshot()
1196 }
1197
1198 /// The file associated with the buffer, if any.
1199 pub fn file(&self) -> Option<&Arc<dyn File>> {
1200 self.file.as_ref()
1201 }
1202
1203 /// The version of the buffer that was last saved or reloaded from disk.
1204 pub fn saved_version(&self) -> &clock::Global {
1205 &self.saved_version
1206 }
1207
1208 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1209 pub fn saved_mtime(&self) -> Option<MTime> {
1210 self.saved_mtime
1211 }
1212
1213 /// Assign a language to the buffer.
1214 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1215 self.non_text_state_update_count += 1;
1216 self.syntax_map.lock().clear(&self.text);
1217 self.language = language;
1218 self.was_changed();
1219 self.reparse(cx);
1220 cx.emit(BufferEvent::LanguageChanged);
1221 }
1222
1223 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1224 /// other languages if parts of the buffer are written in different languages.
1225 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1226 self.syntax_map
1227 .lock()
1228 .set_language_registry(language_registry);
1229 }
1230
1231 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1232 self.syntax_map.lock().language_registry()
1233 }
1234
1235 /// Assign the buffer a new [`Capability`].
1236 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1237 self.capability = capability;
1238 cx.emit(BufferEvent::CapabilityChanged)
1239 }
1240
1241 /// This method is called to signal that the buffer has been saved.
1242 pub fn did_save(
1243 &mut self,
1244 version: clock::Global,
1245 mtime: Option<MTime>,
1246 cx: &mut Context<Self>,
1247 ) {
1248 self.saved_version = version;
1249 self.has_unsaved_edits
1250 .set((self.saved_version().clone(), false));
1251 self.has_conflict = false;
1252 self.saved_mtime = mtime;
1253 self.was_changed();
1254 cx.emit(BufferEvent::Saved);
1255 cx.notify();
1256 }
1257
1258 /// This method is called to signal that the buffer has been discarded.
1259 pub fn discarded(&self, cx: &mut Context<Self>) {
1260 cx.emit(BufferEvent::Discarded);
1261 cx.notify();
1262 }
1263
1264 /// Reloads the contents of the buffer from disk.
1265 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1266 let (tx, rx) = futures::channel::oneshot::channel();
1267 let prev_version = self.text.version();
1268 self.reload_task = Some(cx.spawn(async move |this, cx| {
1269 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1270 let file = this.file.as_ref()?.as_local()?;
1271
1272 Some((file.disk_state().mtime(), file.load(cx)))
1273 })?
1274 else {
1275 return Ok(());
1276 };
1277
1278 let new_text = new_text.await?;
1279 let diff = this
1280 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1281 .await;
1282 this.update(cx, |this, cx| {
1283 if this.version() == diff.base_version {
1284 this.finalize_last_transaction();
1285 this.apply_diff(diff, cx);
1286 tx.send(this.finalize_last_transaction().cloned()).ok();
1287 this.has_conflict = false;
1288 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1289 } else {
1290 if !diff.edits.is_empty()
1291 || this
1292 .edits_since::<usize>(&diff.base_version)
1293 .next()
1294 .is_some()
1295 {
1296 this.has_conflict = true;
1297 }
1298
1299 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1300 }
1301
1302 this.reload_task.take();
1303 })
1304 }));
1305 rx
1306 }
1307
1308 /// This method is called to signal that the buffer has been reloaded.
1309 pub fn did_reload(
1310 &mut self,
1311 version: clock::Global,
1312 line_ending: LineEnding,
1313 mtime: Option<MTime>,
1314 cx: &mut Context<Self>,
1315 ) {
1316 self.saved_version = version;
1317 self.has_unsaved_edits
1318 .set((self.saved_version.clone(), false));
1319 self.text.set_line_ending(line_ending);
1320 self.saved_mtime = mtime;
1321 cx.emit(BufferEvent::Reloaded);
1322 cx.notify();
1323 }
1324
1325 /// Updates the [`File`] backing this buffer. This should be called when
1326 /// the file has changed or has been deleted.
1327 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1328 let was_dirty = self.is_dirty();
1329 let mut file_changed = false;
1330
1331 if let Some(old_file) = self.file.as_ref() {
1332 if new_file.path() != old_file.path() {
1333 file_changed = true;
1334 }
1335
1336 let old_state = old_file.disk_state();
1337 let new_state = new_file.disk_state();
1338 if old_state != new_state {
1339 file_changed = true;
1340 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1341 cx.emit(BufferEvent::ReloadNeeded)
1342 }
1343 }
1344 } else {
1345 file_changed = true;
1346 };
1347
1348 self.file = Some(new_file);
1349 if file_changed {
1350 self.was_changed();
1351 self.non_text_state_update_count += 1;
1352 if was_dirty != self.is_dirty() {
1353 cx.emit(BufferEvent::DirtyChanged);
1354 }
1355 cx.emit(BufferEvent::FileHandleChanged);
1356 cx.notify();
1357 }
1358 }
1359
1360 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1361 Some(self.branch_state.as_ref()?.base_buffer.clone())
1362 }
1363
1364 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1365 pub fn language(&self) -> Option<&Arc<Language>> {
1366 self.language.as_ref()
1367 }
1368
1369 /// Returns the [`Language`] at the given location.
1370 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1371 let offset = position.to_offset(self);
1372 self.syntax_map
1373 .lock()
1374 .layers_for_range(offset..offset, &self.text, false)
1375 .last()
1376 .map(|info| info.language.clone())
1377 .or_else(|| self.language.clone())
1378 }
1379
1380 /// Returns each [`Language`] for the active syntax layers at the given location.
1381 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1382 let offset = position.to_offset(self);
1383 let mut languages: Vec<Arc<Language>> = self
1384 .syntax_map
1385 .lock()
1386 .layers_for_range(offset..offset, &self.text, false)
1387 .map(|info| info.language.clone())
1388 .collect();
1389
1390 if languages.is_empty() {
1391 if let Some(buffer_language) = self.language() {
1392 languages.push(buffer_language.clone());
1393 }
1394 }
1395
1396 languages
1397 }
1398
1399 /// An integer version number that accounts for all updates besides
1400 /// the buffer's text itself (which is versioned via a version vector).
1401 pub fn non_text_state_update_count(&self) -> usize {
1402 self.non_text_state_update_count
1403 }
1404
1405 /// Whether the buffer is being parsed in the background.
1406 #[cfg(any(test, feature = "test-support"))]
1407 pub fn is_parsing(&self) -> bool {
1408 self.reparse.is_some()
1409 }
1410
1411 /// Indicates whether the buffer contains any regions that may be
1412 /// written in a language that hasn't been loaded yet.
1413 pub fn contains_unknown_injections(&self) -> bool {
1414 self.syntax_map.lock().contains_unknown_injections()
1415 }
1416
1417 #[cfg(test)]
1418 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1419 self.sync_parse_timeout = timeout;
1420 }
1421
1422 /// Called after an edit to synchronize the buffer's main parse tree with
1423 /// the buffer's new underlying state.
1424 ///
1425 /// Locks the syntax map and interpolates the edits since the last reparse
1426 /// into the foreground syntax tree.
1427 ///
1428 /// Then takes a stable snapshot of the syntax map before unlocking it.
1429 /// The snapshot with the interpolated edits is sent to a background thread,
1430 /// where we ask Tree-sitter to perform an incremental parse.
1431 ///
1432 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1433 /// waiting on the parse to complete. As soon as it completes, we proceed
1434 /// synchronously, unless a 1ms timeout elapses.
1435 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and we return with the interpolated tree
    /// still in the foreground. When the background parse completes, it calls
    /// back into the main thread and assigns the new parse state there.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1445 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1446 if self.reparse.is_some() {
1447 return;
1448 }
1449 let language = if let Some(language) = self.language.clone() {
1450 language
1451 } else {
1452 return;
1453 };
1454
1455 let text = self.text_snapshot();
1456 let parsed_version = self.version();
1457
1458 let mut syntax_map = self.syntax_map.lock();
1459 syntax_map.interpolate(&text);
1460 let language_registry = syntax_map.language_registry();
1461 let mut syntax_snapshot = syntax_map.snapshot();
1462 drop(syntax_map);
1463
1464 let parse_task = cx.background_spawn({
1465 let language = language.clone();
1466 let language_registry = language_registry.clone();
1467 async move {
1468 syntax_snapshot.reparse(&text, language_registry, language);
1469 syntax_snapshot
1470 }
1471 });
1472
1473 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1474 match cx
1475 .background_executor()
1476 .block_with_timeout(self.sync_parse_timeout, parse_task)
1477 {
1478 Ok(new_syntax_snapshot) => {
1479 self.did_finish_parsing(new_syntax_snapshot, cx);
1480 self.reparse = None;
1481 }
1482 Err(parse_task) => {
1483 self.reparse = Some(cx.spawn(async move |this, cx| {
1484 let new_syntax_map = parse_task.await;
1485 this.update(cx, move |this, cx| {
1486 let grammar_changed =
1487 this.language.as_ref().map_or(true, |current_language| {
1488 !Arc::ptr_eq(&language, current_language)
1489 });
1490 let language_registry_changed = new_syntax_map
1491 .contains_unknown_injections()
1492 && language_registry.map_or(false, |registry| {
1493 registry.version() != new_syntax_map.language_registry_version()
1494 });
1495 let parse_again = language_registry_changed
1496 || grammar_changed
1497 || this.version.changed_since(&parsed_version);
1498 this.did_finish_parsing(new_syntax_map, cx);
1499 this.reparse = None;
1500 if parse_again {
1501 this.reparse(cx);
1502 }
1503 })
1504 .ok();
1505 }));
1506 }
1507 }
1508 }
1509
1510 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1511 self.was_changed();
1512 self.non_text_state_update_count += 1;
1513 self.syntax_map.lock().did_parse(syntax_snapshot);
1514 self.request_autoindent(cx);
1515 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1516 cx.emit(BufferEvent::Reparsed);
1517 cx.notify();
1518 }
1519
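    /// Returns a receiver that reports whether the buffer is currently being
    /// parsed in the background.
    ///
    /// A hedged sketch of waiting for parsing to settle; it assumes an async
    /// context and that the `async_watch` receiver exposes `borrow` and
    /// `changed`:
    ///
    /// ```ignore
    /// let mut status = buffer.read(cx).parse_status();
    /// while *status.borrow() != ParseStatus::Idle {
    ///     if status.changed().await.is_err() {
    ///         break;
    ///     }
    /// }
    /// ```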
1520 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1521 self.parse_status.1.clone()
1522 }
1523
1524 /// Assign to the buffer a set of diagnostics created by a given language server.
1525 pub fn update_diagnostics(
1526 &mut self,
1527 server_id: LanguageServerId,
1528 diagnostics: DiagnosticSet,
1529 cx: &mut Context<Self>,
1530 ) {
1531 let lamport_timestamp = self.text.lamport_clock.tick();
1532 let op = Operation::UpdateDiagnostics {
1533 server_id,
1534 diagnostics: diagnostics.iter().cloned().collect(),
1535 lamport_timestamp,
1536 };
1537 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1538 self.send_operation(op, true, cx);
1539 }
1540
1541 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1542 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1543 return None;
1544 };
1545 Some(&self.diagnostics[idx].1)
1546 }
1547
1548 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1549 if let Some(indent_sizes) = self.compute_autoindents() {
1550 let indent_sizes = cx.background_spawn(indent_sizes);
1551 match cx
1552 .background_executor()
1553 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1554 {
1555 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1556 Err(indent_sizes) => {
1557 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1558 let indent_sizes = indent_sizes.await;
1559 this.update(cx, |this, cx| {
1560 this.apply_autoindents(indent_sizes, cx);
1561 })
1562 .ok();
1563 }));
1564 }
1565 }
1566 } else {
1567 self.autoindent_requests.clear();
1568 }
1569 }
1570
1571 fn compute_autoindents(
1572 &self,
1573 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1574 let max_rows_between_yields = 100;
1575 let snapshot = self.snapshot();
1576 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1577 return None;
1578 }
1579
1580 let autoindent_requests = self.autoindent_requests.clone();
1581 Some(async move {
1582 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1583 for request in autoindent_requests {
1584 // Resolve each edited range to its row in the current buffer and in the
1585 // buffer before this batch of edits.
1586 let mut row_ranges = Vec::new();
1587 let mut old_to_new_rows = BTreeMap::new();
1588 let mut language_indent_sizes_by_new_row = Vec::new();
1589 for entry in &request.entries {
1590 let position = entry.range.start;
1591 let new_row = position.to_point(&snapshot).row;
1592 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1593 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1594
1595 if !entry.first_line_is_new {
1596 let old_row = position.to_point(&request.before_edit).row;
1597 old_to_new_rows.insert(old_row, new_row);
1598 }
1599 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1600 }
1601
1602 // Build a map containing the suggested indentation for each of the edited lines
1603 // with respect to the state of the buffer before these edits. This map is keyed
1604 // by the rows for these lines in the current state of the buffer.
1605 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1606 let old_edited_ranges =
1607 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1608 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1609 let mut language_indent_size = IndentSize::default();
1610 for old_edited_range in old_edited_ranges {
1611 let suggestions = request
1612 .before_edit
1613 .suggest_autoindents(old_edited_range.clone())
1614 .into_iter()
1615 .flatten();
1616 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1617 if let Some(suggestion) = suggestion {
1618 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1619
1620 // Find the indent size based on the language for this row.
1621 while let Some((row, size)) = language_indent_sizes.peek() {
1622 if *row > new_row {
1623 break;
1624 }
1625 language_indent_size = *size;
1626 language_indent_sizes.next();
1627 }
1628
1629 let suggested_indent = old_to_new_rows
1630 .get(&suggestion.basis_row)
1631 .and_then(|from_row| {
1632 Some(old_suggestions.get(from_row).copied()?.0)
1633 })
1634 .unwrap_or_else(|| {
1635 request
1636 .before_edit
1637 .indent_size_for_line(suggestion.basis_row)
1638 })
1639 .with_delta(suggestion.delta, language_indent_size);
1640 old_suggestions
1641 .insert(new_row, (suggested_indent, suggestion.within_error));
1642 }
1643 }
1644 yield_now().await;
1645 }
1646
1647 // Compute new suggestions for each line, but only include them in the result
1648 // if they differ from the old suggestion for that line.
1649 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1650 let mut language_indent_size = IndentSize::default();
1651 for (row_range, original_indent_column) in row_ranges {
1652 let new_edited_row_range = if request.is_block_mode {
1653 row_range.start..row_range.start + 1
1654 } else {
1655 row_range.clone()
1656 };
1657
1658 let suggestions = snapshot
1659 .suggest_autoindents(new_edited_row_range.clone())
1660 .into_iter()
1661 .flatten();
1662 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1663 if let Some(suggestion) = suggestion {
1664 // Find the indent size based on the language for this row.
1665 while let Some((row, size)) = language_indent_sizes.peek() {
1666 if *row > new_row {
1667 break;
1668 }
1669 language_indent_size = *size;
1670 language_indent_sizes.next();
1671 }
1672
1673 let suggested_indent = indent_sizes
1674 .get(&suggestion.basis_row)
1675 .copied()
1676 .map(|e| e.0)
1677 .unwrap_or_else(|| {
1678 snapshot.indent_size_for_line(suggestion.basis_row)
1679 })
1680 .with_delta(suggestion.delta, language_indent_size);
1681
1682 if old_suggestions.get(&new_row).map_or(
1683 true,
1684 |(old_indentation, was_within_error)| {
1685 suggested_indent != *old_indentation
1686 && (!suggestion.within_error || *was_within_error)
1687 },
1688 ) {
1689 indent_sizes.insert(
1690 new_row,
1691 (suggested_indent, request.ignore_empty_lines),
1692 );
1693 }
1694 }
1695 }
1696
1697 if let (true, Some(original_indent_column)) =
1698 (request.is_block_mode, original_indent_column)
1699 {
1700 let new_indent =
1701 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1702 *indent
1703 } else {
1704 snapshot.indent_size_for_line(row_range.start)
1705 };
1706 let delta = new_indent.len as i64 - original_indent_column as i64;
1707 if delta != 0 {
1708 for row in row_range.skip(1) {
1709 indent_sizes.entry(row).or_insert_with(|| {
1710 let mut size = snapshot.indent_size_for_line(row);
1711 if size.kind == new_indent.kind {
1712 match delta.cmp(&0) {
1713 Ordering::Greater => size.len += delta as u32,
1714 Ordering::Less => {
1715 size.len = size.len.saturating_sub(-delta as u32)
1716 }
1717 Ordering::Equal => {}
1718 }
1719 }
1720 (size, request.ignore_empty_lines)
1721 });
1722 }
1723 }
1724 }
1725
1726 yield_now().await;
1727 }
1728 }
1729
1730 indent_sizes
1731 .into_iter()
1732 .filter_map(|(row, (indent, ignore_empty_lines))| {
1733 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1734 None
1735 } else {
1736 Some((row, indent))
1737 }
1738 })
1739 .collect()
1740 })
1741 }
1742
1743 fn apply_autoindents(
1744 &mut self,
1745 indent_sizes: BTreeMap<u32, IndentSize>,
1746 cx: &mut Context<Self>,
1747 ) {
1748 self.autoindent_requests.clear();
1749
1750 let edits: Vec<_> = indent_sizes
1751 .into_iter()
1752 .filter_map(|(row, indent_size)| {
1753 let current_size = indent_size_for_line(self, row);
1754 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1755 })
1756 .collect();
1757
1758 let preserve_preview = self.preserve_preview();
1759 self.edit(edits, None, cx);
1760 if preserve_preview {
1761 self.refresh_preview();
1762 }
1763 }
1764
1765 /// Create a minimal edit that will cause the given row to be indented
1766 /// with the given size. After applying this edit, the length of the line
1767 /// will always be at least `new_size.len`.
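/// # Example
///
/// A minimal sketch (illustrative only): growing a two-space indent to four
/// spaces produces an insertion of two spaces at the start of the row.
///
/// ```ignore
/// let edit = Buffer::edit_for_indent_size_adjustment(
///     0,
///     IndentSize::spaces(2),
///     IndentSize::spaces(4),
/// );
/// assert_eq!(
///     edit,
///     Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string())),
/// );
/// ```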
1768 pub fn edit_for_indent_size_adjustment(
1769 row: u32,
1770 current_size: IndentSize,
1771 new_size: IndentSize,
1772 ) -> Option<(Range<Point>, String)> {
1773 if new_size.kind == current_size.kind {
1774 match new_size.len.cmp(&current_size.len) {
1775 Ordering::Greater => {
1776 let point = Point::new(row, 0);
1777 Some((
1778 point..point,
1779 iter::repeat(new_size.char())
1780 .take((new_size.len - current_size.len) as usize)
1781 .collect::<String>(),
1782 ))
1783 }
1784
1785 Ordering::Less => Some((
1786 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1787 String::new(),
1788 )),
1789
1790 Ordering::Equal => None,
1791 }
1792 } else {
1793 Some((
1794 Point::new(row, 0)..Point::new(row, current_size.len),
1795 iter::repeat(new_size.char())
1796 .take(new_size.len as usize)
1797 .collect::<String>(),
1798 ))
1799 }
1800 }
1801
1802 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1803 /// and the given new text.
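///
/// A minimal sketch (illustrative only; assumes this runs inside an update
/// closure with access to the buffer and a `cx`, and that the returned task
/// can be awaited in the surrounding async context):
///
/// ```ignore
/// let diff_task = buffer.diff("new contents\n".to_string(), cx);
/// // Later, once the task has resolved:
/// let diff = diff_task.await;
/// buffer.apply_diff(diff, cx);
/// ```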
1804 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1805 let old_text = self.as_rope().clone();
1806 let base_version = self.version();
1807 cx.background_executor()
1808 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1809 let old_text = old_text.to_string();
1810 let line_ending = LineEnding::detect(&new_text);
1811 LineEnding::normalize(&mut new_text);
1812 let edits = text_diff(&old_text, &new_text);
1813 Diff {
1814 base_version,
1815 line_ending,
1816 edits,
1817 }
1818 })
1819 }
1820
1821 /// Spawns a background task that searches the buffer for any whitespace
1822 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
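///
/// A minimal sketch (illustrative only), pairing the computed diff with
/// [`Buffer::apply_diff`]:
///
/// ```ignore
/// let task = buffer.remove_trailing_whitespace(cx);
/// let diff = task.await;
/// buffer.apply_diff(diff, cx);
/// ```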
1823 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1824 let old_text = self.as_rope().clone();
1825 let line_ending = self.line_ending();
1826 let base_version = self.version();
1827 cx.background_spawn(async move {
1828 let ranges = trailing_whitespace_ranges(&old_text);
1829 let empty = Arc::<str>::from("");
1830 Diff {
1831 base_version,
1832 line_ending,
1833 edits: ranges
1834 .into_iter()
1835 .map(|range| (range, empty.clone()))
1836 .collect(),
1837 }
1838 })
1839 }
1840
1841 /// Ensures that the buffer ends with a single newline character, and
1842 /// no other whitespace.
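///
/// A minimal sketch (illustrative only; assumes `cx: &mut Context<Buffer>`):
///
/// ```ignore
/// buffer.set_text("fn main() {}\t \n\n", cx);
/// buffer.ensure_final_newline(cx);
/// assert_eq!(buffer.text(), "fn main() {}\n");
/// ```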
1843 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1844 let len = self.len();
1845 let mut offset = len;
1846 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1847 let non_whitespace_len = chunk
1848 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1849 .len();
1850 offset -= chunk.len();
1851 offset += non_whitespace_len;
1852 if non_whitespace_len != 0 {
1853 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1854 return;
1855 }
1856 break;
1857 }
1858 }
1859 self.edit([(offset..len, "\n")], None, cx);
1860 }
1861
1862 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1863 /// calculated, then adjust the diff to account for those changes, and discard any
1864 /// parts of the diff that conflict with those changes.
1865 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1866 let snapshot = self.snapshot();
1867 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1868 let mut delta = 0;
1869 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1870 while let Some(edit_since) = edits_since.peek() {
1871 // If the edit occurs after a diff hunk, then it does not
1872 // affect that hunk.
1873 if edit_since.old.start > range.end {
1874 break;
1875 }
1876 // If the edit precedes the diff hunk, then adjust the hunk
1877 // to reflect the edit.
1878 else if edit_since.old.end < range.start {
1879 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1880 edits_since.next();
1881 }
1882 // If the edit intersects a diff hunk, then discard that hunk.
1883 else {
1884 return None;
1885 }
1886 }
1887
1888 let start = (range.start as i64 + delta) as usize;
1889 let end = (range.end as i64 + delta) as usize;
1890 Some((start..end, new_text))
1891 });
1892
1893 self.start_transaction();
1894 self.text.set_line_ending(diff.line_ending);
1895 self.edit(adjusted_edits, None, cx);
1896 self.end_transaction(cx)
1897 }
1898
1899 fn has_unsaved_edits(&self) -> bool {
1900 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1901
1902 if last_version == self.version {
1903 self.has_unsaved_edits
1904 .set((last_version, has_unsaved_edits));
1905 return has_unsaved_edits;
1906 }
1907
1908 let has_edits = self.has_edits_since(&self.saved_version);
1909 self.has_unsaved_edits
1910 .set((self.version.clone(), has_edits));
1911 has_edits
1912 }
1913
1914 /// Checks if the buffer has unsaved changes.
1915 pub fn is_dirty(&self) -> bool {
1916 if self.capability == Capability::ReadOnly {
1917 return false;
1918 }
1919 if self.has_conflict {
1920 return true;
1921 }
1922 match self.file.as_ref().map(|f| f.disk_state()) {
1923 Some(DiskState::New) | Some(DiskState::Deleted) => {
1924 !self.is_empty() && self.has_unsaved_edits()
1925 }
1926 _ => self.has_unsaved_edits(),
1927 }
1928 }
1929
1930 /// Checks if the buffer and its file have both changed since the buffer
1931 /// was last saved or reloaded.
1932 pub fn has_conflict(&self) -> bool {
1933 if self.has_conflict {
1934 return true;
1935 }
1936 let Some(file) = self.file.as_ref() else {
1937 return false;
1938 };
1939 match file.disk_state() {
1940 DiskState::New => false,
1941 DiskState::Present { mtime } => match self.saved_mtime {
1942 Some(saved_mtime) => {
1943 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1944 }
1945 None => true,
1946 },
1947 DiskState::Deleted => false,
1948 }
1949 }
1950
1951 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1952 pub fn subscribe(&mut self) -> Subscription {
1953 self.text.subscribe()
1954 }
1955
1956 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1957 ///
1958 /// This allows downstream code to check if the buffer's text has changed without
1959 /// waiting for an effect cycle, which would be required if using events.
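///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let change_bit = std::rc::Rc::new(std::cell::Cell::new(false));
/// buffer.record_changes(std::rc::Rc::downgrade(&change_bit));
/// buffer.edit([(0..0, "hello")], None, cx);
/// assert!(change_bit.get());
/// ```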
1960 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1961 if let Err(ix) = self
1962 .change_bits
1963 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1964 {
1965 self.change_bits.insert(ix, bit);
1966 }
1967 }
1968
1969 fn was_changed(&mut self) {
1970 self.change_bits.retain(|change_bit| {
1971 change_bit.upgrade().map_or(false, |bit| {
1972 bit.replace(true);
1973 true
1974 })
1975 });
1976 }
1977
1978 /// Starts a transaction, if one is not already in-progress. When undoing or
1979 /// redoing edits, all of the edits performed within a transaction are undone
1980 /// or redone together.
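///
/// A minimal sketch (illustrative only) of grouping two edits so that a single
/// undo reverts both:
///
/// ```ignore
/// buffer.start_transaction();
/// buffer.edit([(0..0, "foo")], None, cx);
/// buffer.edit([(3..3, "bar")], None, cx);
/// buffer.end_transaction(cx);
/// buffer.undo(cx); // Reverts both edits at once.
/// ```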
1981 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1982 self.start_transaction_at(Instant::now())
1983 }
1984
1985 /// Starts a transaction, providing the current time. Subsequent transactions
1986 /// that occur within a short period of time will be grouped together. This
1987 /// is controlled by the buffer's undo grouping duration.
1988 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1989 self.transaction_depth += 1;
1990 if self.was_dirty_before_starting_transaction.is_none() {
1991 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1992 }
1993 self.text.start_transaction_at(now)
1994 }
1995
1996 /// Terminates the current transaction, if this is the outermost transaction.
1997 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1998 self.end_transaction_at(Instant::now(), cx)
1999 }
2000
2001 /// Terminates the current transaction, providing the current time. Subsequent transactions
2002 /// that occur within a short period of time will be grouped together. This
2003 /// is controlled by the buffer's undo grouping duration.
2004 pub fn end_transaction_at(
2005 &mut self,
2006 now: Instant,
2007 cx: &mut Context<Self>,
2008 ) -> Option<TransactionId> {
2009 assert!(self.transaction_depth > 0);
2010 self.transaction_depth -= 1;
2011 let was_dirty = if self.transaction_depth == 0 {
2012 self.was_dirty_before_starting_transaction.take().unwrap()
2013 } else {
2014 false
2015 };
2016 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2017 self.did_edit(&start_version, was_dirty, cx);
2018 Some(transaction_id)
2019 } else {
2020 None
2021 }
2022 }
2023
2024 /// Manually add a transaction to the buffer's undo history.
2025 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2026 self.text.push_transaction(transaction, now);
2027 }
2028
2029 /// Prevent the last transaction from being grouped with any subsequent transactions,
2030 /// even if they occur within the buffer's undo grouping duration.
2031 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2032 self.text.finalize_last_transaction()
2033 }
2034
2035 /// Manually group all changes since a given transaction.
2036 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2037 self.text.group_until_transaction(transaction_id);
2038 }
2039
2040 /// Manually remove a transaction from the buffer's undo history
2041 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2042 self.text.forget_transaction(transaction_id)
2043 }
2044
2045 /// Retrieve a transaction from the buffer's undo history
2046 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2047 self.text.get_transaction(transaction_id)
2048 }
2049
2050 /// Manually merge two transactions in the buffer's undo history.
2051 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2052 self.text.merge_transactions(transaction, destination);
2053 }
2054
2055 /// Waits for the buffer to receive operations with the given timestamps.
2056 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2057 &mut self,
2058 edit_ids: It,
2059 ) -> impl Future<Output = Result<()>> + use<It> {
2060 self.text.wait_for_edits(edit_ids)
2061 }
2062
2063 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2064 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2065 &mut self,
2066 anchors: It,
2067 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2068 self.text.wait_for_anchors(anchors)
2069 }
2070
2071 /// Waits for the buffer to receive operations up to the given version.
2072 pub fn wait_for_version(
2073 &mut self,
2074 version: clock::Global,
2075 ) -> impl Future<Output = Result<()>> + use<> {
2076 self.text.wait_for_version(version)
2077 }
2078
2079 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2080 /// [`Buffer::wait_for_anchors`] to resolve with an error.
2081 pub fn give_up_waiting(&mut self) {
2082 self.text.give_up_waiting();
2083 }
2084
2085 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2086 pub fn set_active_selections(
2087 &mut self,
2088 selections: Arc<[Selection<Anchor>]>,
2089 line_mode: bool,
2090 cursor_shape: CursorShape,
2091 cx: &mut Context<Self>,
2092 ) {
2093 let lamport_timestamp = self.text.lamport_clock.tick();
2094 self.remote_selections.insert(
2095 self.text.replica_id(),
2096 SelectionSet {
2097 selections: selections.clone(),
2098 lamport_timestamp,
2099 line_mode,
2100 cursor_shape,
2101 },
2102 );
2103 self.send_operation(
2104 Operation::UpdateSelections {
2105 selections,
2106 line_mode,
2107 lamport_timestamp,
2108 cursor_shape,
2109 },
2110 true,
2111 cx,
2112 );
2113 self.non_text_state_update_count += 1;
2114 cx.notify();
2115 }
2116
2117 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2118 /// this replica.
2119 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2120 if self
2121 .remote_selections
2122 .get(&self.text.replica_id())
2123 .map_or(true, |set| !set.selections.is_empty())
2124 {
2125 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2126 }
2127 }
2128
2129 pub fn set_agent_selections(
2130 &mut self,
2131 selections: Arc<[Selection<Anchor>]>,
2132 line_mode: bool,
2133 cursor_shape: CursorShape,
2134 cx: &mut Context<Self>,
2135 ) {
2136 let lamport_timestamp = self.text.lamport_clock.tick();
2137 self.remote_selections.insert(
2138 AGENT_REPLICA_ID,
2139 SelectionSet {
2140 selections: selections.clone(),
2141 lamport_timestamp,
2142 line_mode,
2143 cursor_shape,
2144 },
2145 );
2146 self.non_text_state_update_count += 1;
2147 cx.notify();
2148 }
2149
2150 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2151 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2152 }
2153
2154 /// Replaces the buffer's entire text.
2155 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2156 where
2157 T: Into<Arc<str>>,
2158 {
2159 self.autoindent_requests.clear();
2160 self.edit([(0..self.len(), text)], None, cx)
2161 }
2162
2163 /// Appends the given text to the end of the buffer.
2164 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2165 where
2166 T: Into<Arc<str>>,
2167 {
2168 self.edit([(self.len()..self.len(), text)], None, cx)
2169 }
2170
2171 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2172 /// delete, and a string of text to insert at that location.
2173 ///
2174 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2175 /// request for the edited ranges, which will be processed when the buffer finishes
2176 /// parsing.
2177 ///
2178 /// Parsing takes place at the end of a transaction, and may run synchronously
2179 /// or asynchronously, depending on the changes.
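///
/// A minimal sketch (illustrative only; the offsets are made up for the example):
///
/// ```ignore
/// buffer.edit(
///     [(0..3, "let"), (10..10, "\nvalue")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```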
2180 pub fn edit<I, S, T>(
2181 &mut self,
2182 edits_iter: I,
2183 autoindent_mode: Option<AutoindentMode>,
2184 cx: &mut Context<Self>,
2185 ) -> Option<clock::Lamport>
2186 where
2187 I: IntoIterator<Item = (Range<S>, T)>,
2188 S: ToOffset,
2189 T: Into<Arc<str>>,
2190 {
2191 // Skip invalid edits and coalesce contiguous ones.
2192 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2193
2194 for (range, new_text) in edits_iter {
2195 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2196
2197 if range.start > range.end {
2198 mem::swap(&mut range.start, &mut range.end);
2199 }
2200 let new_text = new_text.into();
2201 if !new_text.is_empty() || !range.is_empty() {
2202 if let Some((prev_range, prev_text)) = edits.last_mut() {
2203 if prev_range.end >= range.start {
2204 prev_range.end = cmp::max(prev_range.end, range.end);
2205 *prev_text = format!("{prev_text}{new_text}").into();
2206 } else {
2207 edits.push((range, new_text));
2208 }
2209 } else {
2210 edits.push((range, new_text));
2211 }
2212 }
2213 }
2214 if edits.is_empty() {
2215 return None;
2216 }
2217
2218 self.start_transaction();
2219 self.pending_autoindent.take();
2220 let autoindent_request = autoindent_mode
2221 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2222
2223 let edit_operation = self.text.edit(edits.iter().cloned());
2224 let edit_id = edit_operation.timestamp();
2225
2226 if let Some((before_edit, mode)) = autoindent_request {
2227 let mut delta = 0isize;
2228 let entries = edits
2229 .into_iter()
2230 .enumerate()
2231 .zip(&edit_operation.as_edit().unwrap().new_text)
2232 .map(|((ix, (range, _)), new_text)| {
2233 let new_text_length = new_text.len();
2234 let old_start = range.start.to_point(&before_edit);
2235 let new_start = (delta + range.start as isize) as usize;
2236 let range_len = range.end - range.start;
2237 delta += new_text_length as isize - range_len as isize;
2238
2239 // Decide what range of the insertion to auto-indent, and whether
2240 // the first line of the insertion should be considered a newly-inserted line
2241 // or an edit to an existing line.
2242 let mut range_of_insertion_to_indent = 0..new_text_length;
2243 let mut first_line_is_new = true;
2244
2245 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2246 let old_line_end = before_edit.line_len(old_start.row);
2247
2248 if old_start.column > old_line_start {
2249 first_line_is_new = false;
2250 }
2251
2252 if !new_text.contains('\n')
2253 && (old_start.column + (range_len as u32) < old_line_end
2254 || old_line_end == old_line_start)
2255 {
2256 first_line_is_new = false;
2257 }
2258
2259 // When inserting text starting with a newline, avoid auto-indenting the
2260 // previous line.
2261 if new_text.starts_with('\n') {
2262 range_of_insertion_to_indent.start += 1;
2263 first_line_is_new = true;
2264 }
2265
2266 let mut original_indent_column = None;
2267 if let AutoindentMode::Block {
2268 original_indent_columns,
2269 } = &mode
2270 {
2271 original_indent_column = Some(if new_text.starts_with('\n') {
2272 indent_size_for_text(
2273 new_text[range_of_insertion_to_indent.clone()].chars(),
2274 )
2275 .len
2276 } else {
2277 original_indent_columns
2278 .get(ix)
2279 .copied()
2280 .flatten()
2281 .unwrap_or_else(|| {
2282 indent_size_for_text(
2283 new_text[range_of_insertion_to_indent.clone()].chars(),
2284 )
2285 .len
2286 })
2287 });
2288
2289 // Avoid auto-indenting the line after the edit.
2290 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2291 range_of_insertion_to_indent.end -= 1;
2292 }
2293 }
2294
2295 AutoindentRequestEntry {
2296 first_line_is_new,
2297 original_indent_column,
2298 indent_size: before_edit.language_indent_size_at(range.start, cx),
2299 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2300 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2301 }
2302 })
2303 .collect();
2304
2305 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2306 before_edit,
2307 entries,
2308 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2309 ignore_empty_lines: false,
2310 }));
2311 }
2312
2313 self.end_transaction(cx);
2314 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2315 Some(edit_id)
2316 }
2317
2318 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2319 self.was_changed();
2320
2321 if self.edits_since::<usize>(old_version).next().is_none() {
2322 return;
2323 }
2324
2325 self.reparse(cx);
2326 cx.emit(BufferEvent::Edited);
2327 if was_dirty != self.is_dirty() {
2328 cx.emit(BufferEvent::DirtyChanged);
2329 }
2330 cx.notify();
2331 }
2332
2333 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2334 where
2335 I: IntoIterator<Item = Range<T>>,
2336 T: ToOffset + Copy,
2337 {
2338 let before_edit = self.snapshot();
2339 let entries = ranges
2340 .into_iter()
2341 .map(|range| AutoindentRequestEntry {
2342 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2343 first_line_is_new: true,
2344 indent_size: before_edit.language_indent_size_at(range.start, cx),
2345 original_indent_column: None,
2346 })
2347 .collect();
2348 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2349 before_edit,
2350 entries,
2351 is_block_mode: false,
2352 ignore_empty_lines: true,
2353 }));
2354 self.request_autoindent(cx);
2355 }
2356
2357 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2358 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2359 pub fn insert_empty_line(
2360 &mut self,
2361 position: impl ToPoint,
2362 space_above: bool,
2363 space_below: bool,
2364 cx: &mut Context<Self>,
2365 ) -> Point {
2366 let mut position = position.to_point(self);
2367
2368 self.start_transaction();
2369
2370 self.edit(
2371 [(position..position, "\n")],
2372 Some(AutoindentMode::EachLine),
2373 cx,
2374 );
2375
2376 if position.column > 0 {
2377 position += Point::new(1, 0);
2378 }
2379
2380 if !self.is_line_blank(position.row) {
2381 self.edit(
2382 [(position..position, "\n")],
2383 Some(AutoindentMode::EachLine),
2384 cx,
2385 );
2386 }
2387
2388 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2389 self.edit(
2390 [(position..position, "\n")],
2391 Some(AutoindentMode::EachLine),
2392 cx,
2393 );
2394 position.row += 1;
2395 }
2396
2397 if space_below
2398 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2399 {
2400 self.edit(
2401 [(position..position, "\n")],
2402 Some(AutoindentMode::EachLine),
2403 cx,
2404 );
2405 }
2406
2407 self.end_transaction(cx);
2408
2409 position
2410 }
2411
2412 /// Applies the given remote operations to the buffer.
2413 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2414 self.pending_autoindent.take();
2415 let was_dirty = self.is_dirty();
2416 let old_version = self.version.clone();
2417 let mut deferred_ops = Vec::new();
2418 let buffer_ops = ops
2419 .into_iter()
2420 .filter_map(|op| match op {
2421 Operation::Buffer(op) => Some(op),
2422 _ => {
2423 if self.can_apply_op(&op) {
2424 self.apply_op(op, cx);
2425 } else {
2426 deferred_ops.push(op);
2427 }
2428 None
2429 }
2430 })
2431 .collect::<Vec<_>>();
2432 for operation in buffer_ops.iter() {
2433 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2434 }
2435 self.text.apply_ops(buffer_ops);
2436 self.deferred_ops.insert(deferred_ops);
2437 self.flush_deferred_ops(cx);
2438 self.did_edit(&old_version, was_dirty, cx);
2439 // Notify independently of whether the buffer was edited as the operations could include a
2440 // selection update.
2441 cx.notify();
2442 }
2443
2444 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2445 let mut deferred_ops = Vec::new();
2446 for op in self.deferred_ops.drain().iter().cloned() {
2447 if self.can_apply_op(&op) {
2448 self.apply_op(op, cx);
2449 } else {
2450 deferred_ops.push(op);
2451 }
2452 }
2453 self.deferred_ops.insert(deferred_ops);
2454 }
2455
2456 pub fn has_deferred_ops(&self) -> bool {
2457 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2458 }
2459
2460 fn can_apply_op(&self, operation: &Operation) -> bool {
2461 match operation {
2462 Operation::Buffer(_) => {
2463 unreachable!("buffer operations should never be applied at this layer")
2464 }
2465 Operation::UpdateDiagnostics {
2466 diagnostics: diagnostic_set,
2467 ..
2468 } => diagnostic_set.iter().all(|diagnostic| {
2469 self.text.can_resolve(&diagnostic.range.start)
2470 && self.text.can_resolve(&diagnostic.range.end)
2471 }),
2472 Operation::UpdateSelections { selections, .. } => selections
2473 .iter()
2474 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2475 Operation::UpdateCompletionTriggers { .. } => true,
2476 }
2477 }
2478
2479 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2480 match operation {
2481 Operation::Buffer(_) => {
2482 unreachable!("buffer operations should never be applied at this layer")
2483 }
2484 Operation::UpdateDiagnostics {
2485 server_id,
2486 diagnostics: diagnostic_set,
2487 lamport_timestamp,
2488 } => {
2489 let snapshot = self.snapshot();
2490 self.apply_diagnostic_update(
2491 server_id,
2492 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2493 lamport_timestamp,
2494 cx,
2495 );
2496 }
2497 Operation::UpdateSelections {
2498 selections,
2499 lamport_timestamp,
2500 line_mode,
2501 cursor_shape,
2502 } => {
2503 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2504 if set.lamport_timestamp > lamport_timestamp {
2505 return;
2506 }
2507 }
2508
2509 self.remote_selections.insert(
2510 lamport_timestamp.replica_id,
2511 SelectionSet {
2512 selections,
2513 lamport_timestamp,
2514 line_mode,
2515 cursor_shape,
2516 },
2517 );
2518 self.text.lamport_clock.observe(lamport_timestamp);
2519 self.non_text_state_update_count += 1;
2520 }
2521 Operation::UpdateCompletionTriggers {
2522 triggers,
2523 lamport_timestamp,
2524 server_id,
2525 } => {
2526 if triggers.is_empty() {
2527 self.completion_triggers_per_language_server
2528 .remove(&server_id);
2529 self.completion_triggers = self
2530 .completion_triggers_per_language_server
2531 .values()
2532 .flat_map(|triggers| triggers.into_iter().cloned())
2533 .collect();
2534 } else {
2535 self.completion_triggers_per_language_server
2536 .insert(server_id, triggers.iter().cloned().collect());
2537 self.completion_triggers.extend(triggers);
2538 }
2539 self.text.lamport_clock.observe(lamport_timestamp);
2540 }
2541 }
2542 }
2543
2544 fn apply_diagnostic_update(
2545 &mut self,
2546 server_id: LanguageServerId,
2547 diagnostics: DiagnosticSet,
2548 lamport_timestamp: clock::Lamport,
2549 cx: &mut Context<Self>,
2550 ) {
2551 if lamport_timestamp > self.diagnostics_timestamp {
2552 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2553 if diagnostics.is_empty() {
2554 if let Ok(ix) = ix {
2555 self.diagnostics.remove(ix);
2556 }
2557 } else {
2558 match ix {
2559 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2560 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2561 };
2562 }
2563 self.diagnostics_timestamp = lamport_timestamp;
2564 self.non_text_state_update_count += 1;
2565 self.text.lamport_clock.observe(lamport_timestamp);
2566 cx.notify();
2567 cx.emit(BufferEvent::DiagnosticsUpdated);
2568 }
2569 }
2570
2571 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2572 self.was_changed();
2573 cx.emit(BufferEvent::Operation {
2574 operation,
2575 is_local,
2576 });
2577 }
2578
2579 /// Removes the selections for a given peer.
2580 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2581 self.remote_selections.remove(&replica_id);
2582 cx.notify();
2583 }
2584
2585 /// Undoes the most recent transaction.
2586 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2587 let was_dirty = self.is_dirty();
2588 let old_version = self.version.clone();
2589
2590 if let Some((transaction_id, operation)) = self.text.undo() {
2591 self.send_operation(Operation::Buffer(operation), true, cx);
2592 self.did_edit(&old_version, was_dirty, cx);
2593 Some(transaction_id)
2594 } else {
2595 None
2596 }
2597 }
2598
2599 /// Manually undoes a specific transaction in the buffer's undo history.
2600 pub fn undo_transaction(
2601 &mut self,
2602 transaction_id: TransactionId,
2603 cx: &mut Context<Self>,
2604 ) -> bool {
2605 let was_dirty = self.is_dirty();
2606 let old_version = self.version.clone();
2607 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2608 self.send_operation(Operation::Buffer(operation), true, cx);
2609 self.did_edit(&old_version, was_dirty, cx);
2610 true
2611 } else {
2612 false
2613 }
2614 }
2615
2616 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2617 pub fn undo_to_transaction(
2618 &mut self,
2619 transaction_id: TransactionId,
2620 cx: &mut Context<Self>,
2621 ) -> bool {
2622 let was_dirty = self.is_dirty();
2623 let old_version = self.version.clone();
2624
2625 let operations = self.text.undo_to_transaction(transaction_id);
2626 let undone = !operations.is_empty();
2627 for operation in operations {
2628 self.send_operation(Operation::Buffer(operation), true, cx);
2629 }
2630 if undone {
2631 self.did_edit(&old_version, was_dirty, cx)
2632 }
2633 undone
2634 }
2635
2636 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2637 let was_dirty = self.is_dirty();
2638 let operation = self.text.undo_operations(counts);
2639 let old_version = self.version.clone();
2640 self.send_operation(Operation::Buffer(operation), true, cx);
2641 self.did_edit(&old_version, was_dirty, cx);
2642 }
2643
2644 /// Redoes the most recently undone transaction.
2645 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2646 let was_dirty = self.is_dirty();
2647 let old_version = self.version.clone();
2648
2649 if let Some((transaction_id, operation)) = self.text.redo() {
2650 self.send_operation(Operation::Buffer(operation), true, cx);
2651 self.did_edit(&old_version, was_dirty, cx);
2652 Some(transaction_id)
2653 } else {
2654 None
2655 }
2656 }
2657
2658 /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2659 pub fn redo_to_transaction(
2660 &mut self,
2661 transaction_id: TransactionId,
2662 cx: &mut Context<Self>,
2663 ) -> bool {
2664 let was_dirty = self.is_dirty();
2665 let old_version = self.version.clone();
2666
2667 let operations = self.text.redo_to_transaction(transaction_id);
2668 let redone = !operations.is_empty();
2669 for operation in operations {
2670 self.send_operation(Operation::Buffer(operation), true, cx);
2671 }
2672 if redone {
2673 self.did_edit(&old_version, was_dirty, cx)
2674 }
2675 redone
2676 }
2677
2678 /// Overrides the current completion triggers for the given language server with the provided set.
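///
/// A minimal sketch (illustrative only; the language server id and trigger
/// strings are hypothetical):
///
/// ```ignore
/// buffer.set_completion_triggers(
///     LanguageServerId(0),
///     BTreeSet::from_iter([".".to_string(), "::".to_string()]),
///     cx,
/// );
/// ```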
2679 pub fn set_completion_triggers(
2680 &mut self,
2681 server_id: LanguageServerId,
2682 triggers: BTreeSet<String>,
2683 cx: &mut Context<Self>,
2684 ) {
2685 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2686 if triggers.is_empty() {
2687 self.completion_triggers_per_language_server
2688 .remove(&server_id);
2689 self.completion_triggers = self
2690 .completion_triggers_per_language_server
2691 .values()
2692 .flat_map(|triggers| triggers.into_iter().cloned())
2693 .collect();
2694 } else {
2695 self.completion_triggers_per_language_server
2696 .insert(server_id, triggers.clone());
2697 self.completion_triggers.extend(triggers.iter().cloned());
2698 }
2699 self.send_operation(
2700 Operation::UpdateCompletionTriggers {
2701 triggers: triggers.into_iter().collect(),
2702 lamport_timestamp: self.completion_triggers_timestamp,
2703 server_id,
2704 },
2705 true,
2706 cx,
2707 );
2708 cx.notify();
2709 }
2710
2711 /// Returns the set of strings that trigger a completion menu for this language.
2712 /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2713 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2714 &self.completion_triggers
2715 }
2716
2717 /// Call this directly after performing edits to prevent the preview tab
2718 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2719 /// to return false until there are additional edits.
2720 pub fn refresh_preview(&mut self) {
2721 self.preview_version = self.version.clone();
2722 }
2723
2724 /// Whether we should preserve the preview status of a tab containing this buffer.
2725 pub fn preserve_preview(&self) -> bool {
2726 !self.has_edits_since(&self.preview_version)
2727 }
2728}
2729
2730#[doc(hidden)]
2731#[cfg(any(test, feature = "test-support"))]
2732impl Buffer {
2733 pub fn edit_via_marked_text(
2734 &mut self,
2735 marked_string: &str,
2736 autoindent_mode: Option<AutoindentMode>,
2737 cx: &mut Context<Self>,
2738 ) {
2739 let edits = self.edits_for_marked_text(marked_string);
2740 self.edit(edits, autoindent_mode, cx);
2741 }
2742
2743 pub fn set_group_interval(&mut self, group_interval: Duration) {
2744 self.text.set_group_interval(group_interval);
2745 }
2746
2747 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2748 where
2749 T: rand::Rng,
2750 {
2751 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2752 let mut last_end = None;
2753 for _ in 0..old_range_count {
2754 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2755 break;
2756 }
2757
2758 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2759 let mut range = self.random_byte_range(new_start, rng);
2760 if rng.gen_bool(0.2) {
2761 mem::swap(&mut range.start, &mut range.end);
2762 }
2763 last_end = Some(range.end);
2764
2765 let new_text_len = rng.gen_range(0..10);
2766 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2767 new_text = new_text.to_uppercase();
2768
2769 edits.push((range, new_text));
2770 }
2771 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2772 self.edit(edits, None, cx);
2773 }
2774
2775 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2776 let was_dirty = self.is_dirty();
2777 let old_version = self.version.clone();
2778
2779 let ops = self.text.randomly_undo_redo(rng);
2780 if !ops.is_empty() {
2781 for op in ops {
2782 self.send_operation(Operation::Buffer(op), true, cx);
2783 self.did_edit(&old_version, was_dirty, cx);
2784 }
2785 }
2786 }
2787}
2788
2789impl EventEmitter<BufferEvent> for Buffer {}
2790
2791impl Deref for Buffer {
2792 type Target = TextBuffer;
2793
2794 fn deref(&self) -> &Self::Target {
2795 &self.text
2796 }
2797}
2798
2799impl BufferSnapshot {
2800 /// Returns [`IndentSize`] for a given line that respects user settings and
2801 /// language preferences.
2802 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2803 indent_size_for_line(self, row)
2804 }
2805
2806 /// Returns [`IndentSize`] for a given position that respects user settings
2807 /// and language preferences.
2808 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2809 let settings = language_settings(
2810 self.language_at(position).map(|l| l.name()),
2811 self.file(),
2812 cx,
2813 );
2814 if settings.hard_tabs {
2815 IndentSize::tab()
2816 } else {
2817 IndentSize::spaces(settings.tab_size.get())
2818 }
2819 }
2820
2821 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2822 /// is passed in as `single_indent_size`.
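///
/// A minimal sketch (illustrative only):
///
/// ```ignore
/// let indents = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
/// for (row, indent) in indents {
///     println!("row {row}: indent to {} columns", indent.len);
/// }
/// ```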
2823 pub fn suggested_indents(
2824 &self,
2825 rows: impl Iterator<Item = u32>,
2826 single_indent_size: IndentSize,
2827 ) -> BTreeMap<u32, IndentSize> {
2828 let mut result = BTreeMap::new();
2829
2830 for row_range in contiguous_ranges(rows, 10) {
2831 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2832 Some(suggestions) => suggestions,
2833 _ => break,
2834 };
2835
2836 for (row, suggestion) in row_range.zip(suggestions) {
2837 let indent_size = if let Some(suggestion) = suggestion {
2838 result
2839 .get(&suggestion.basis_row)
2840 .copied()
2841 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2842 .with_delta(suggestion.delta, single_indent_size)
2843 } else {
2844 self.indent_size_for_line(row)
2845 };
2846
2847 result.insert(row, indent_size);
2848 }
2849 }
2850
2851 result
2852 }
2853
2854 fn suggest_autoindents(
2855 &self,
2856 row_range: Range<u32>,
2857 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2858 let config = &self.language.as_ref()?.config;
2859 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2860 let significant_indentation = config.significant_indentation;
2861
2862 // Find the suggested indentation ranges based on the syntax tree.
2863 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2864 let end = Point::new(row_range.end, 0);
2865 let range = (start..end).to_offset(&self.text);
2866 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2867 Some(&grammar.indents_config.as_ref()?.query)
2868 });
2869 let indent_configs = matches
2870 .grammars()
2871 .iter()
2872 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2873 .collect::<Vec<_>>();
2874
2875 let mut indent_ranges = Vec::<Range<Point>>::new();
2876 let mut outdent_positions = Vec::<Point>::new();
2877 while let Some(mat) = matches.peek() {
2878 let mut start: Option<Point> = None;
2879 let mut end: Option<Point> = None;
2880 let mut outdent: Option<Point> = None;
2881
2882 let config = &indent_configs[mat.grammar_index];
2883 for capture in mat.captures {
2884 if capture.index == config.indent_capture_ix {
2885 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2886 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2887 } else if Some(capture.index) == config.start_capture_ix {
2888 start = Some(Point::from_ts_point(capture.node.end_position()));
2889 } else if Some(capture.index) == config.end_capture_ix {
2890 end = Some(Point::from_ts_point(capture.node.start_position()));
2891 } else if Some(capture.index) == config.outdent_capture_ix {
2892 let point = Point::from_ts_point(capture.node.start_position());
2893 outdent.get_or_insert(point);
2894 outdent_positions.push(point);
2895 }
2896 }
2897
2898 matches.advance();
2899 // In the case of significant indentation, expand the end to the outdent position.
2900 let end = if significant_indentation {
2901 outdent.or(end)
2902 } else {
2903 end
2904 };
2905 if let Some((start, end)) = start.zip(end) {
2906 if start.row == end.row && !significant_indentation {
2907 continue;
2908 }
2909 let range = start..end;
2910 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2911 Err(ix) => indent_ranges.insert(ix, range),
2912 Ok(ix) => {
2913 let prev_range = &mut indent_ranges[ix];
2914 prev_range.end = prev_range.end.max(range.end);
2915 }
2916 }
2917 }
2918 }
2919
2920 let mut error_ranges = Vec::<Range<Point>>::new();
2921 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2922 grammar.error_query.as_ref()
2923 });
2924 while let Some(mat) = matches.peek() {
2925 let node = mat.captures[0].node;
2926 let start = Point::from_ts_point(node.start_position());
2927 let end = Point::from_ts_point(node.end_position());
2928 let range = start..end;
2929 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2930 Ok(ix) | Err(ix) => ix,
2931 };
2932 let mut end_ix = ix;
2933 while let Some(existing_range) = error_ranges.get(end_ix) {
2934 if existing_range.end < end {
2935 end_ix += 1;
2936 } else {
2937 break;
2938 }
2939 }
2940 error_ranges.splice(ix..end_ix, [range]);
2941 matches.advance();
2942 }
2943
2944 // We don't use outdent positions to truncate when indentation is significant;
2945 // rather, we use them to expand the indent ranges (handled above).
2946 if !significant_indentation {
2947 outdent_positions.sort();
2948 for outdent_position in outdent_positions {
2949 // Find the innermost indent range containing this outdent position
2950 // and set its end to that position.
2951 if let Some(range_to_truncate) = indent_ranges
2952 .iter_mut()
2953 .filter(|indent_range| indent_range.contains(&outdent_position))
2954 .next_back()
2955 {
2956 range_to_truncate.end = outdent_position;
2957 }
2958 }
2959 }
2960
2961 // Find the suggested indentation increases and decreases based on regexes.
2962 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2963 self.for_each_line(
2964 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2965 ..Point::new(row_range.end, 0),
2966 |row, line| {
2967 if config
2968 .decrease_indent_pattern
2969 .as_ref()
2970 .map_or(false, |regex| regex.is_match(line))
2971 {
2972 indent_change_rows.push((row, Ordering::Less));
2973 }
2974 if config
2975 .increase_indent_pattern
2976 .as_ref()
2977 .map_or(false, |regex| regex.is_match(line))
2978 {
2979 indent_change_rows.push((row + 1, Ordering::Greater));
2980 }
2981 },
2982 );
2983
2984 let mut indent_changes = indent_change_rows.into_iter().peekable();
2985 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2986 prev_non_blank_row.unwrap_or(0)
2987 } else {
2988 row_range.start.saturating_sub(1)
2989 };
2990 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2991 Some(row_range.map(move |row| {
2992 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2993
2994 let mut indent_from_prev_row = false;
2995 let mut outdent_from_prev_row = false;
2996 let mut outdent_to_row = u32::MAX;
2997 let mut from_regex = false;
2998
2999 while let Some((indent_row, delta)) = indent_changes.peek() {
3000 match indent_row.cmp(&row) {
3001 Ordering::Equal => match delta {
3002 Ordering::Less => {
3003 from_regex = true;
3004 outdent_from_prev_row = true
3005 }
3006 Ordering::Greater => {
3007 indent_from_prev_row = true;
3008 from_regex = true
3009 }
3010 _ => {}
3011 },
3012
3013 Ordering::Greater => break,
3014 Ordering::Less => {}
3015 }
3016
3017 indent_changes.next();
3018 }
3019
3020 for range in &indent_ranges {
3021 if range.start.row >= row {
3022 break;
3023 }
3024 if range.start.row == prev_row && range.end > row_start {
3025 indent_from_prev_row = true;
3026 }
3027 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3028 {
3029 indent_from_prev_row = true;
3030 }
3031 if !significant_indentation || !self.is_line_blank(row) {
3032 if range.end > prev_row_start && range.end <= row_start {
3033 outdent_to_row = outdent_to_row.min(range.start.row);
3034 }
3035 }
3036 }
3037
3038 let within_error = error_ranges
3039 .iter()
3040 .any(|e| e.start.row < row && e.end > row_start);
3041
3042 let suggestion = if outdent_to_row == prev_row
3043 || (outdent_from_prev_row && indent_from_prev_row)
3044 {
3045 Some(IndentSuggestion {
3046 basis_row: prev_row,
3047 delta: Ordering::Equal,
3048 within_error: within_error && !from_regex,
3049 })
3050 } else if indent_from_prev_row {
3051 Some(IndentSuggestion {
3052 basis_row: prev_row,
3053 delta: Ordering::Greater,
3054 within_error: within_error && !from_regex,
3055 })
3056 } else if outdent_to_row < prev_row {
3057 Some(IndentSuggestion {
3058 basis_row: outdent_to_row,
3059 delta: Ordering::Equal,
3060 within_error: within_error && !from_regex,
3061 })
3062 } else if outdent_from_prev_row {
3063 Some(IndentSuggestion {
3064 basis_row: prev_row,
3065 delta: Ordering::Less,
3066 within_error: within_error && !from_regex,
3067 })
3068 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3069 {
3070 Some(IndentSuggestion {
3071 basis_row: prev_row,
3072 delta: Ordering::Equal,
3073 within_error: within_error && !from_regex,
3074 })
3075 } else {
3076 None
3077 };
3078
3079 prev_row = row;
3080 prev_row_start = row_start;
3081 suggestion
3082 }))
3083 }
3084
3085 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3086 while row > 0 {
3087 row -= 1;
3088 if !self.is_line_blank(row) {
3089 return Some(row);
3090 }
3091 }
3092 None
3093 }
3094
3095 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3096 let captures = self.syntax.captures(range, &self.text, |grammar| {
3097 grammar.highlights_query.as_ref()
3098 });
3099 let highlight_maps = captures
3100 .grammars()
3101 .iter()
3102 .map(|grammar| grammar.highlight_map())
3103 .collect();
3104 (captures, highlight_maps)
3105 }
3106
3107 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3108 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3109 /// returned in chunks where each chunk has a single syntax highlighting style and
3110 /// diagnostic status.
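///
/// A minimal sketch (illustrative only) that reassembles the chunked text:
///
/// ```ignore
/// let mut text = String::new();
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // Each chunk carries a text slice plus optional highlight and diagnostic info.
///     text.push_str(chunk.text);
/// }
/// assert_eq!(text, snapshot.text());
/// ```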
3111 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3112 let range = range.start.to_offset(self)..range.end.to_offset(self);
3113
3114 let mut syntax = None;
3115 if language_aware {
3116 syntax = Some(self.get_highlights(range.clone()));
3117 }
3118 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3119 let diagnostics = language_aware;
3120 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3121 }
3122
3123 pub fn highlighted_text_for_range<T: ToOffset>(
3124 &self,
3125 range: Range<T>,
3126 override_style: Option<HighlightStyle>,
3127 syntax_theme: &SyntaxTheme,
3128 ) -> HighlightedText {
3129 HighlightedText::from_buffer_range(
3130 range,
3131 &self.text,
3132 &self.syntax,
3133 override_style,
3134 syntax_theme,
3135 )
3136 }
3137
3138 /// Invokes the given callback for each line of text in the given range of the buffer.
3139 /// Uses callback to avoid allocating a string for each line.
3140 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3141 let mut line = String::new();
3142 let mut row = range.start.row;
3143 for chunk in self
3144 .as_rope()
3145 .chunks_in_range(range.to_offset(self))
3146 .chain(["\n"])
3147 {
3148 for (newline_ix, text) in chunk.split('\n').enumerate() {
3149 if newline_ix > 0 {
3150 callback(row, &line);
3151 row += 1;
3152 line.clear();
3153 }
3154 line.push_str(text);
3155 }
3156 }
3157 }
3158
3159 /// Iterates over every [`SyntaxLayer`] in the buffer.
3160 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3161 self.syntax
3162 .layers_for_range(0..self.len(), &self.text, true)
3163 }
3164
3165 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3166 let offset = position.to_offset(self);
3167 self.syntax
3168 .layers_for_range(offset..offset, &self.text, false)
3169 .filter(|l| l.node().end_byte() > offset)
3170 .last()
3171 }
3172
3173 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3174 &self,
3175 range: Range<D>,
3176 ) -> Option<SyntaxLayer> {
3177 let range = range.to_offset(self);
3178 return self
3179 .syntax
3180 .layers_for_range(range, &self.text, false)
3181 .max_by(|a, b| {
3182 if a.depth != b.depth {
3183 a.depth.cmp(&b.depth)
3184 } else if a.offset.0 != b.offset.0 {
3185 a.offset.0.cmp(&b.offset.0)
3186 } else {
3187 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3188 }
3189 });
3190 }
3191
3192 /// Returns the main [`Language`].
3193 pub fn language(&self) -> Option<&Arc<Language>> {
3194 self.language.as_ref()
3195 }
3196
3197 /// Returns the [`Language`] at the given location.
3198 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3199 self.syntax_layer_at(position)
3200 .map(|info| info.language)
3201 .or(self.language.as_ref())
3202 }
3203
3204 /// Returns the settings for the language at the given location.
3205 pub fn settings_at<'a, D: ToOffset>(
3206 &'a self,
3207 position: D,
3208 cx: &'a App,
3209 ) -> Cow<'a, LanguageSettings> {
3210 language_settings(
3211 self.language_at(position).map(|l| l.name()),
3212 self.file.as_ref(),
3213 cx,
3214 )
3215 }
3216
3217 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3218 CharClassifier::new(self.language_scope_at(point))
3219 }
3220
3221 /// Returns the [`LanguageScope`] at the given location.
3222 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3223 let offset = position.to_offset(self);
3224 let mut scope = None;
3225 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3226
3227 // Use the layer that has the smallest node intersecting the given point.
3228 for layer in self
3229 .syntax
3230 .layers_for_range(offset..offset, &self.text, false)
3231 {
3232 let mut cursor = layer.node().walk();
3233
3234 let mut range = None;
3235 loop {
3236 let child_range = cursor.node().byte_range();
3237 if !child_range.contains(&offset) {
3238 break;
3239 }
3240
3241 range = Some(child_range);
3242 if cursor.goto_first_child_for_byte(offset).is_none() {
3243 break;
3244 }
3245 }
3246
3247 if let Some(range) = range {
3248 if smallest_range_and_depth.as_ref().map_or(
3249 true,
3250 |(smallest_range, smallest_range_depth)| {
3251 if layer.depth > *smallest_range_depth {
3252 true
3253 } else if layer.depth == *smallest_range_depth {
3254 range.len() < smallest_range.len()
3255 } else {
3256 false
3257 }
3258 },
3259 ) {
3260 smallest_range_and_depth = Some((range, layer.depth));
3261 scope = Some(LanguageScope {
3262 language: layer.language.clone(),
3263 override_id: layer.override_id(offset, &self.text),
3264 });
3265 }
3266 }
3267 }
3268
3269 scope.or_else(|| {
3270 self.language.clone().map(|language| LanguageScope {
3271 language,
3272 override_id: None,
3273 })
3274 })
3275 }
3276
3277 /// Returns a tuple of the range and character kind of the word
3278 /// surrounding the given position.
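///
/// A minimal sketch (illustrative only): with the buffer text `"hello world"`,
/// an offset inside the first word yields that word's byte range.
///
/// ```ignore
/// let (range, kind) = snapshot.surrounding_word(2);
/// assert_eq!(range, 0..5);
/// assert_eq!(kind, Some(CharKind::Word));
/// ```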
3279 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3280 let mut start = start.to_offset(self);
3281 let mut end = start;
3282 let mut next_chars = self.chars_at(start).peekable();
3283 let mut prev_chars = self.reversed_chars_at(start).peekable();
3284
3285 let classifier = self.char_classifier_at(start);
3286 let word_kind = cmp::max(
3287 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3288 next_chars.peek().copied().map(|c| classifier.kind(c)),
3289 );
3290
3291 for ch in prev_chars {
3292 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3293 start -= ch.len_utf8();
3294 } else {
3295 break;
3296 }
3297 }
3298
3299 for ch in next_chars {
3300 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3301 end += ch.len_utf8();
3302 } else {
3303 break;
3304 }
3305 }
3306
3307 (start..end, word_kind)
3308 }
3309
3310 /// Returns the closest syntax node enclosing the given range.
3311 pub fn syntax_ancestor<'a, T: ToOffset>(
3312 &'a self,
3313 range: Range<T>,
3314 ) -> Option<tree_sitter::Node<'a>> {
3315 let range = range.start.to_offset(self)..range.end.to_offset(self);
3316 let mut result: Option<tree_sitter::Node<'a>> = None;
3317 'outer: for layer in self
3318 .syntax
3319 .layers_for_range(range.clone(), &self.text, true)
3320 {
3321 let mut cursor = layer.node().walk();
3322
3323 // Descend to the first leaf that touches the start of the range.
3324 //
3325 // If the range is non-empty and the current node ends exactly at the start,
3326 // move to the next sibling to find a node that extends beyond the start.
3327 //
3328 // If the range is empty and the current node starts after the range position,
3329 // move to the previous sibling to find the node that contains the position.
3330 while cursor.goto_first_child_for_byte(range.start).is_some() {
3331 if !range.is_empty() && cursor.node().end_byte() == range.start {
3332 cursor.goto_next_sibling();
3333 }
3334 if range.is_empty() && cursor.node().start_byte() > range.start {
3335 cursor.goto_previous_sibling();
3336 }
3337 }
3338
3339 // Ascend to the smallest ancestor that strictly contains the range.
3340 loop {
3341 let node_range = cursor.node().byte_range();
3342 if node_range.start <= range.start
3343 && node_range.end >= range.end
3344 && node_range.len() > range.len()
3345 {
3346 break;
3347 }
3348 if !cursor.goto_parent() {
3349 continue 'outer;
3350 }
3351 }
3352
3353 let left_node = cursor.node();
3354 let mut layer_result = left_node;
3355
3356 // For an empty range, try to find another node immediately to the right of the range.
3357 if left_node.end_byte() == range.start {
3358 let mut right_node = None;
3359 while !cursor.goto_next_sibling() {
3360 if !cursor.goto_parent() {
3361 break;
3362 }
3363 }
3364
3365 while cursor.node().start_byte() == range.start {
3366 right_node = Some(cursor.node());
3367 if !cursor.goto_first_child() {
3368 break;
3369 }
3370 }
3371
3372 // If there is a candidate node on both sides of the (empty) range, then
3373 // decide between the two by favoring a named node over an anonymous token.
3374 // If both nodes are the same in that regard, favor the right one.
3375 if let Some(right_node) = right_node {
3376 if right_node.is_named() || !left_node.is_named() {
3377 layer_result = right_node;
3378 }
3379 }
3380 }
3381
3382 if let Some(previous_result) = &result {
3383 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3384 continue;
3385 }
3386 }
3387 result = Some(layer_result);
3388 }
3389
3390 result
3391 }
3392
3393 /// Returns the root syntax node within the given row.
3394 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3395 let start_offset = position.to_offset(self);
3396
3397 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3398
3399 let layer = self
3400 .syntax
3401 .layers_for_range(start_offset..start_offset, &self.text, true)
3402 .next()?;
3403
3404 let mut cursor = layer.node().walk();
3405
3406 // Descend to the first leaf that touches the start of the range.
3407 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3408 if cursor.node().end_byte() == start_offset {
3409 cursor.goto_next_sibling();
3410 }
3411 }
3412
3413 // Ascend to the root node within the same row.
3414 while cursor.goto_parent() {
3415 if cursor.node().start_position().row != row {
3416 break;
3417 }
3418 }
3419
3420 return Some(cursor.node());
3421 }
3422
3423 /// Returns the outline for the buffer.
3424 ///
3425 /// This method allows passing an optional [`SyntaxTheme`] to
3426 /// syntax-highlight the returned symbols.
3427 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3428 self.outline_items_containing(0..self.len(), true, theme)
3429 .map(Outline::new)
3430 }
3431
3432 /// Returns all the symbols that contain the given position.
3433 ///
3434 /// This method allows passing an optional [`SyntaxTheme`] to
3435 /// syntax-highlight the returned symbols.
3436 pub fn symbols_containing<T: ToOffset>(
3437 &self,
3438 position: T,
3439 theme: Option<&SyntaxTheme>,
3440 ) -> Option<Vec<OutlineItem<Anchor>>> {
3441 let position = position.to_offset(self);
3442 let mut items = self.outline_items_containing(
3443 position.saturating_sub(1)..self.len().min(position + 1),
3444 false,
3445 theme,
3446 )?;
3447 let mut prev_depth = None;
3448 items.retain(|item| {
3449 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3450 prev_depth = Some(item.depth);
3451 result
3452 });
3453 Some(items)
3454 }
3455
3456 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3457 let range = range.to_offset(self);
3458 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3459 grammar.outline_config.as_ref().map(|c| &c.query)
3460 });
3461 let configs = matches
3462 .grammars()
3463 .iter()
3464 .map(|g| g.outline_config.as_ref().unwrap())
3465 .collect::<Vec<_>>();
3466
3467 while let Some(mat) = matches.peek() {
3468 let config = &configs[mat.grammar_index];
3469 let containing_item_node = maybe!({
3470 let item_node = mat.captures.iter().find_map(|cap| {
3471 if cap.index == config.item_capture_ix {
3472 Some(cap.node)
3473 } else {
3474 None
3475 }
3476 })?;
3477
3478 let item_byte_range = item_node.byte_range();
3479 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3480 None
3481 } else {
3482 Some(item_node)
3483 }
3484 });
3485
3486 if let Some(item_node) = containing_item_node {
3487 return Some(
3488 Point::from_ts_point(item_node.start_position())
3489 ..Point::from_ts_point(item_node.end_position()),
3490 );
3491 }
3492
3493 matches.advance();
3494 }
3495 None
3496 }
3497
3498 pub fn outline_items_containing<T: ToOffset>(
3499 &self,
3500 range: Range<T>,
3501 include_extra_context: bool,
3502 theme: Option<&SyntaxTheme>,
3503 ) -> Option<Vec<OutlineItem<Anchor>>> {
3504 let range = range.to_offset(self);
3505 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3506 grammar.outline_config.as_ref().map(|c| &c.query)
3507 });
3508 let configs = matches
3509 .grammars()
3510 .iter()
3511 .map(|g| g.outline_config.as_ref().unwrap())
3512 .collect::<Vec<_>>();
3513
3514 let mut items = Vec::new();
3515 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3516 while let Some(mat) = matches.peek() {
3517 let config = &configs[mat.grammar_index];
3518 if let Some(item) =
3519 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3520 {
3521 items.push(item);
3522 } else if let Some(capture) = mat
3523 .captures
3524 .iter()
3525 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3526 {
3527 let capture_range = capture.node.start_position()..capture.node.end_position();
3528 let mut capture_row_range =
3529 capture_range.start.row as u32..capture_range.end.row as u32;
3530 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3531 {
3532 capture_row_range.end -= 1;
3533 }
3534 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3535 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3536 last_row_range.end = capture_row_range.end;
3537 } else {
3538 annotation_row_ranges.push(capture_row_range);
3539 }
3540 } else {
3541 annotation_row_ranges.push(capture_row_range);
3542 }
3543 }
3544 matches.advance();
3545 }
3546
3547 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3548
3549 // Assign depths based on containment relationships and convert to anchors.
3550 let mut item_ends_stack = Vec::<Point>::new();
3551 let mut anchor_items = Vec::new();
3552 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3553 for item in items {
3554 while let Some(last_end) = item_ends_stack.last().copied() {
3555 if last_end < item.range.end {
3556 item_ends_stack.pop();
3557 } else {
3558 break;
3559 }
3560 }
3561
3562 let mut annotation_row_range = None;
3563 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3564 let row_preceding_item = item.range.start.row.saturating_sub(1);
3565 if next_annotation_row_range.end < row_preceding_item {
3566 annotation_row_ranges.next();
3567 } else {
3568 if next_annotation_row_range.end == row_preceding_item {
3569 annotation_row_range = Some(next_annotation_row_range.clone());
3570 annotation_row_ranges.next();
3571 }
3572 break;
3573 }
3574 }
3575
3576 anchor_items.push(OutlineItem {
3577 depth: item_ends_stack.len(),
3578 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3579 text: item.text,
3580 highlight_ranges: item.highlight_ranges,
3581 name_ranges: item.name_ranges,
3582 body_range: item.body_range.map(|body_range| {
3583 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3584 }),
3585 annotation_range: annotation_row_range.map(|annotation_range| {
3586 self.anchor_after(Point::new(annotation_range.start, 0))
3587 ..self.anchor_before(Point::new(
3588 annotation_range.end,
3589 self.line_len(annotation_range.end),
3590 ))
3591 }),
3592 });
3593 item_ends_stack.push(item.range.end);
3594 }
3595
3596 Some(anchor_items)
3597 }
3598
3599 fn next_outline_item(
3600 &self,
3601 config: &OutlineConfig,
3602 mat: &SyntaxMapMatch,
3603 range: &Range<usize>,
3604 include_extra_context: bool,
3605 theme: Option<&SyntaxTheme>,
3606 ) -> Option<OutlineItem<Point>> {
3607 let item_node = mat.captures.iter().find_map(|cap| {
3608 if cap.index == config.item_capture_ix {
3609 Some(cap.node)
3610 } else {
3611 None
3612 }
3613 })?;
3614
3615 let item_byte_range = item_node.byte_range();
3616 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3617 return None;
3618 }
3619 let item_point_range = Point::from_ts_point(item_node.start_position())
3620 ..Point::from_ts_point(item_node.end_position());
3621
3622 let mut open_point = None;
3623 let mut close_point = None;
3624 let mut buffer_ranges = Vec::new();
3625 for capture in mat.captures {
3626 let node_is_name;
3627 if capture.index == config.name_capture_ix {
3628 node_is_name = true;
3629 } else if Some(capture.index) == config.context_capture_ix
3630 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3631 {
3632 node_is_name = false;
3633 } else {
3634 if Some(capture.index) == config.open_capture_ix {
3635 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3636 } else if Some(capture.index) == config.close_capture_ix {
3637 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3638 }
3639
3640 continue;
3641 }
3642
3643 let mut range = capture.node.start_byte()..capture.node.end_byte();
3644 let start = capture.node.start_position();
3645 if capture.node.end_position().row > start.row {
3646 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3647 }
3648
3649 if !range.is_empty() {
3650 buffer_ranges.push((range, node_is_name));
3651 }
3652 }
3653 if buffer_ranges.is_empty() {
3654 return None;
3655 }
3656 let mut text = String::new();
3657 let mut highlight_ranges = Vec::new();
3658 let mut name_ranges = Vec::new();
3659 let mut chunks = self.chunks(
3660 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3661 true,
3662 );
3663 let mut last_buffer_range_end = 0;
3664
3665 for (buffer_range, is_name) in buffer_ranges {
3666 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3667 if space_added {
3668 text.push(' ');
3669 }
3670 let before_append_len = text.len();
3671 let mut offset = buffer_range.start;
3672 chunks.seek(buffer_range.clone());
3673 for mut chunk in chunks.by_ref() {
3674 if chunk.text.len() > buffer_range.end - offset {
3675 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3676 offset = buffer_range.end;
3677 } else {
3678 offset += chunk.text.len();
3679 }
3680 let style = chunk
3681 .syntax_highlight_id
3682 .zip(theme)
3683 .and_then(|(highlight, theme)| highlight.style(theme));
3684 if let Some(style) = style {
3685 let start = text.len();
3686 let end = start + chunk.text.len();
3687 highlight_ranges.push((start..end, style));
3688 }
3689 text.push_str(chunk.text);
3690 if offset >= buffer_range.end {
3691 break;
3692 }
3693 }
3694 if is_name {
3695 let after_append_len = text.len();
3696 let start = if space_added && !name_ranges.is_empty() {
3697 before_append_len - 1
3698 } else {
3699 before_append_len
3700 };
3701 name_ranges.push(start..after_append_len);
3702 }
3703 last_buffer_range_end = buffer_range.end;
3704 }
3705
3706 Some(OutlineItem {
3707 depth: 0, // We'll calculate the depth later
3708 range: item_point_range,
3709 text,
3710 highlight_ranges,
3711 name_ranges,
3712 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3713 annotation_range: None,
3714 })
3715 }
3716
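    /// Returns the ranges inside function bodies that overlap `within`,
    /// e.g. as fold targets when collapsing all function bodies in a buffer.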
3717 pub fn function_body_fold_ranges<T: ToOffset>(
3718 &self,
3719 within: Range<T>,
3720 ) -> impl Iterator<Item = Range<usize>> + '_ {
3721 self.text_object_ranges(within, TreeSitterOptions::default())
3722 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3723 }
3724
3725 /// For each grammar in the language, runs the provided
3726 /// [`tree_sitter::Query`] against the given range.
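    ///
    /// An illustrative sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`, and mirrors the outline-query lookup used
    /// elsewhere in this file): count the outline matches within a range.
    ///
    /// ```ignore
    /// fn count_outline_matches(snapshot: &BufferSnapshot, range: Range<usize>) -> usize {
    ///     let mut matches = snapshot.matches(range, |grammar| {
    ///         grammar.outline_config.as_ref().map(|config| &config.query)
    ///     });
    ///     let mut count = 0;
    ///     while matches.peek().is_some() {
    ///         count += 1;
    ///         matches.advance();
    ///     }
    ///     count
    /// }
    /// ```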
3727 pub fn matches(
3728 &self,
3729 range: Range<usize>,
3730 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3731 ) -> SyntaxMapMatches {
3732 self.syntax.matches(range, self, query)
3733 }
3734
3735 pub fn all_bracket_ranges(
3736 &self,
3737 range: Range<usize>,
3738 ) -> impl Iterator<Item = BracketMatch> + '_ {
3739 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3740 grammar.brackets_config.as_ref().map(|c| &c.query)
3741 });
3742 let configs = matches
3743 .grammars()
3744 .iter()
3745 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3746 .collect::<Vec<_>>();
3747
3748 iter::from_fn(move || {
3749 while let Some(mat) = matches.peek() {
3750 let mut open = None;
3751 let mut close = None;
3752 let config = &configs[mat.grammar_index];
3753 let pattern = &config.patterns[mat.pattern_index];
3754 for capture in mat.captures {
3755 if capture.index == config.open_capture_ix {
3756 open = Some(capture.node.byte_range());
3757 } else if capture.index == config.close_capture_ix {
3758 close = Some(capture.node.byte_range());
3759 }
3760 }
3761
3762 matches.advance();
3763
3764 let Some((open_range, close_range)) = open.zip(close) else {
3765 continue;
3766 };
3767
3768 let bracket_range = open_range.start..=close_range.end;
3769 if !bracket_range.overlaps(&range) {
3770 continue;
3771 }
3772
3773 return Some(BracketMatch {
3774 open_range,
3775 close_range,
3776 newline_only: pattern.newline_only,
3777 });
3778 }
3779 None
3780 })
3781 }
3782
3783 /// Returns bracket range pairs overlapping or adjacent to `range`.
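    ///
    /// A small sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`): collect the byte ranges of the bracket
    /// pairs around a cursor offset.
    ///
    /// ```ignore
    /// fn brackets_at(snapshot: &BufferSnapshot, offset: usize) -> Vec<(Range<usize>, Range<usize>)> {
    ///     snapshot
    ///         .bracket_ranges(offset..offset)
    ///         .map(|pair| (pair.open_range, pair.close_range))
    ///         .collect()
    /// }
    /// ```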
3784 pub fn bracket_ranges<T: ToOffset>(
3785 &self,
3786 range: Range<T>,
3787 ) -> impl Iterator<Item = BracketMatch> + '_ {
3788 // Expand the search range by one byte on each side so that bracket pairs adjacent to `range` are also found.
3789 let range = range.start.to_offset(self).saturating_sub(1)
3790 ..self.len().min(range.end.to_offset(self) + 1);
3791 self.all_bracket_ranges(range)
3792 .filter(|pair| !pair.newline_only)
3793 }
3794
3795 pub fn text_object_ranges<T: ToOffset>(
3796 &self,
3797 range: Range<T>,
3798 options: TreeSitterOptions,
3799 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3800 let range = range.start.to_offset(self).saturating_sub(1)
3801 ..self.len().min(range.end.to_offset(self) + 1);
3802
3803 let mut matches =
3804 self.syntax
3805 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3806 grammar.text_object_config.as_ref().map(|c| &c.query)
3807 });
3808
3809 let configs = matches
3810 .grammars()
3811 .iter()
3812 .map(|grammar| grammar.text_object_config.as_ref())
3813 .collect::<Vec<_>>();
3814
3815 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3816
3817 iter::from_fn(move || {
3818 loop {
3819 while let Some(capture) = captures.pop() {
3820 if capture.0.overlaps(&range) {
3821 return Some(capture);
3822 }
3823 }
3824
3825 let mat = matches.peek()?;
3826
3827 let Some(config) = configs[mat.grammar_index].as_ref() else {
3828 matches.advance();
3829 continue;
3830 };
3831
3832 for capture in mat.captures {
3833 let Some(ix) = config
3834 .text_objects_by_capture_ix
3835 .binary_search_by_key(&capture.index, |e| e.0)
3836 .ok()
3837 else {
3838 continue;
3839 };
3840 let text_object = config.text_objects_by_capture_ix[ix].1;
3841 let byte_range = capture.node.byte_range();
3842
3843 let mut found = false;
3844 for (range, existing) in captures.iter_mut() {
3845 if existing == &text_object {
3846 range.start = range.start.min(byte_range.start);
3847 range.end = range.end.max(byte_range.end);
3848 found = true;
3849 break;
3850 }
3851 }
3852
3853 if !found {
3854 captures.push((byte_range, text_object));
3855 }
3856 }
3857
3858 matches.advance();
3859 }
3860 })
3861 }
3862
3863 /// Returns the enclosing bracket ranges containing the given range.
3864 pub fn enclosing_bracket_ranges<T: ToOffset>(
3865 &self,
3866 range: Range<T>,
3867 ) -> impl Iterator<Item = BracketMatch> + '_ {
3868 let range = range.start.to_offset(self)..range.end.to_offset(self);
3869
3870 self.bracket_ranges(range.clone()).filter(move |pair| {
3871 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3872 })
3873 }
3874
3875 /// Returns the smallest enclosing pair of bracket ranges containing the given range, or `None` if no brackets contain it.
3876 ///
3877 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
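    ///
    /// An illustrative sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`): find the innermost pair whose open and
    /// close brackets do not touch.
    ///
    /// ```ignore
    /// fn innermost_separated_pair(
    ///     snapshot: &BufferSnapshot,
    ///     range: Range<usize>,
    /// ) -> Option<(Range<usize>, Range<usize>)> {
    ///     snapshot.innermost_enclosing_bracket_ranges(
    ///         range,
    ///         Some(&|open: Range<usize>, close: Range<usize>| open.end < close.start),
    ///     )
    /// }
    /// ```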
3878 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3879 &self,
3880 range: Range<T>,
3881 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3882 ) -> Option<(Range<usize>, Range<usize>)> {
3883 let range = range.start.to_offset(self)..range.end.to_offset(self);
3884
3885 // Get the ranges of the innermost pair of brackets.
3886 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3887
3888 for pair in self.enclosing_bracket_ranges(range.clone()) {
3889 if let Some(range_filter) = range_filter {
3890 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3891 continue;
3892 }
3893 }
3894
3895 let len = pair.close_range.end - pair.open_range.start;
3896
3897 if let Some((existing_open, existing_close)) = &result {
3898 let existing_len = existing_close.end - existing_open.start;
3899 if len > existing_len {
3900 continue;
3901 }
3902 }
3903
3904 result = Some((pair.open_range, pair.close_range));
3905 }
3906
3907 result
3908 }
3909
3910 /// Returns anchor ranges for any matches of the redaction query.
3911 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3912 /// will be run on the relevant section of the buffer.
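    ///
    /// A minimal sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`): collect every redacted range in the
    /// buffer, e.g. to render the matching text as bullet characters.
    ///
    /// ```ignore
    /// fn all_redactions(snapshot: &BufferSnapshot) -> Vec<Range<usize>> {
    ///     snapshot.redacted_ranges(0..snapshot.len()).collect()
    /// }
    /// ```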
3913 pub fn redacted_ranges<T: ToOffset>(
3914 &self,
3915 range: Range<T>,
3916 ) -> impl Iterator<Item = Range<usize>> + '_ {
3917 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3918 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3919 grammar
3920 .redactions_config
3921 .as_ref()
3922 .map(|config| &config.query)
3923 });
3924
3925 let configs = syntax_matches
3926 .grammars()
3927 .iter()
3928 .map(|grammar| grammar.redactions_config.as_ref())
3929 .collect::<Vec<_>>();
3930
3931 iter::from_fn(move || {
3932 let redacted_range = syntax_matches
3933 .peek()
3934 .and_then(|mat| {
3935 configs[mat.grammar_index].and_then(|config| {
3936 mat.captures
3937 .iter()
3938 .find(|capture| capture.index == config.redaction_capture_ix)
3939 })
3940 })
3941 .map(|mat| mat.node.byte_range());
3942 syntax_matches.advance();
3943 redacted_range
3944 })
3945 }
3946
3947 pub fn injections_intersecting_range<T: ToOffset>(
3948 &self,
3949 range: Range<T>,
3950 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3951 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3952
3953 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3954 grammar
3955 .injection_config
3956 .as_ref()
3957 .map(|config| &config.query)
3958 });
3959
3960 let configs = syntax_matches
3961 .grammars()
3962 .iter()
3963 .map(|grammar| grammar.injection_config.as_ref())
3964 .collect::<Vec<_>>();
3965
3966 iter::from_fn(move || {
3967 let ranges = syntax_matches.peek().and_then(|mat| {
3968 let config = &configs[mat.grammar_index]?;
3969 let content_capture_range = mat.captures.iter().find_map(|capture| {
3970 if capture.index == config.content_capture_ix {
3971 Some(capture.node.byte_range())
3972 } else {
3973 None
3974 }
3975 })?;
3976 let language = self.language_at(content_capture_range.start)?;
3977 Some((content_capture_range, language))
3978 });
3979 syntax_matches.advance();
3980 ranges
3981 })
3982 }
3983
3984 pub fn runnable_ranges(
3985 &self,
3986 offset_range: Range<usize>,
3987 ) -> impl Iterator<Item = RunnableRange> + '_ {
3988 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3989 grammar.runnable_config.as_ref().map(|config| &config.query)
3990 });
3991
3992 let test_configs = syntax_matches
3993 .grammars()
3994 .iter()
3995 .map(|grammar| grammar.runnable_config.as_ref())
3996 .collect::<Vec<_>>();
3997
3998 iter::from_fn(move || {
3999 loop {
4000 let mat = syntax_matches.peek()?;
4001
4002 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4003 let mut run_range = None;
4004 let full_range = mat.captures.iter().fold(
4005 Range {
4006 start: usize::MAX,
4007 end: 0,
4008 },
4009 |mut acc, next| {
4010 let byte_range = next.node.byte_range();
4011 if acc.start > byte_range.start {
4012 acc.start = byte_range.start;
4013 }
4014 if acc.end < byte_range.end {
4015 acc.end = byte_range.end;
4016 }
4017 acc
4018 },
4019 );
4020 if full_range.start > full_range.end {
4021 // We did not find a full spanning range of this match.
4022 return None;
4023 }
4024 let extra_captures: SmallVec<[_; 1]> =
4025 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4026 test_configs
4027 .extra_captures
4028 .get(capture.index as usize)
4029 .cloned()
4030 .and_then(|tag_name| match tag_name {
4031 RunnableCapture::Named(name) => {
4032 Some((capture.node.byte_range(), name))
4033 }
4034 RunnableCapture::Run => {
4035 let _ = run_range.insert(capture.node.byte_range());
4036 None
4037 }
4038 })
4039 }));
4040 let run_range = run_range?;
4041 let tags = test_configs
4042 .query
4043 .property_settings(mat.pattern_index)
4044 .iter()
4045 .filter_map(|property| {
4046 if *property.key == *"tag" {
4047 property
4048 .value
4049 .as_ref()
4050 .map(|value| RunnableTag(value.to_string().into()))
4051 } else {
4052 None
4053 }
4054 })
4055 .collect();
4056 let extra_captures = extra_captures
4057 .into_iter()
4058 .map(|(range, name)| {
4059 (
4060 name.to_string(),
4061 self.text_for_range(range.clone()).collect::<String>(),
4062 )
4063 })
4064 .collect();
4065 // All tags should have the same range.
4066 Some(RunnableRange {
4067 run_range,
4068 full_range,
4069 runnable: Runnable {
4070 tags,
4071 language: mat.language,
4072 buffer: self.remote_id(),
4073 },
4074 extra_captures,
4075 buffer_id: self.remote_id(),
4076 })
4077 });
4078
4079 syntax_matches.advance();
4080 if test_range.is_some() {
4081 // It's fine to short-circuit when `.peek()?` above returns None. But a match without a run marker
4082 // should not end this iterator, so in that case we loop around and try the next match.
4083 return test_range;
4084 }
4085 }
4086 })
4087 }
4088
4089 /// Returns selections for remote peers intersecting the given range.
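    ///
    /// An illustrative sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot` and that `Anchor::MIN..Anchor::MAX` spans
    /// the whole buffer):
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(Anchor::MIN..Anchor::MAX, false)
    /// {
    ///     for selection in selections {
    ///         // Render `selection` for the remote collaborator `replica_id`,
    ///         // taking `line_mode` and `cursor_shape` into account.
    ///     }
    /// }
    /// ```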
4090 #[allow(clippy::type_complexity)]
4091 pub fn selections_in_range(
4092 &self,
4093 range: Range<Anchor>,
4094 include_local: bool,
4095 ) -> impl Iterator<
4096 Item = (
4097 ReplicaId,
4098 bool,
4099 CursorShape,
4100 impl Iterator<Item = &Selection<Anchor>> + '_,
4101 ),
4102 > + '_ {
4103 self.remote_selections
4104 .iter()
4105 .filter(move |(replica_id, set)| {
4106 (include_local || **replica_id != self.text.replica_id())
4107 && !set.selections.is_empty()
4108 })
4109 .map(move |(replica_id, set)| {
4110 let start_ix = match set.selections.binary_search_by(|probe| {
4111 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4112 }) {
4113 Ok(ix) | Err(ix) => ix,
4114 };
4115 let end_ix = match set.selections.binary_search_by(|probe| {
4116 probe.start.cmp(&range.end, self).then(Ordering::Less)
4117 }) {
4118 Ok(ix) | Err(ix) => ix,
4119 };
4120
4121 (
4122 *replica_id,
4123 set.line_mode,
4124 set.cursor_shape,
4125 set.selections[start_ix..end_ix].iter(),
4126 )
4127 })
4128 }
4129
4130 /// Returns whether the buffer contains any diagnostics.
4131 pub fn has_diagnostics(&self) -> bool {
4132 !self.diagnostics.is_empty()
4133 }
4134
4135 /// Returns all the diagnostics intersecting the given range.
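    ///
    /// A small sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`): count the errors, as opposed to warnings
    /// or hints, within a range.
    ///
    /// ```ignore
    /// fn error_count(snapshot: &BufferSnapshot, range: Range<usize>) -> usize {
    ///     snapshot
    ///         .diagnostics_in_range::<_, usize>(range, false)
    ///         .filter(|entry| entry.diagnostic.severity == DiagnosticSeverity::ERROR)
    ///         .count()
    /// }
    /// ```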
4136 pub fn diagnostics_in_range<'a, T, O>(
4137 &'a self,
4138 search_range: Range<T>,
4139 reversed: bool,
4140 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4141 where
4142 T: 'a + Clone + ToOffset,
4143 O: 'a + FromAnchor,
4144 {
4145 let mut iterators: Vec<_> = self
4146 .diagnostics
4147 .iter()
4148 .map(|(_, collection)| {
4149 collection
4150 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4151 .peekable()
4152 })
4153 .collect();
4154
4155 std::iter::from_fn(move || {
4156 let (next_ix, _) = iterators
4157 .iter_mut()
4158 .enumerate()
4159 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4160 .min_by(|(_, a), (_, b)| {
4161 let cmp = a
4162 .range
4163 .start
4164 .cmp(&b.range.start, self)
4165 // when range is equal, sort by diagnostic severity
4166 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4167 // and stabilize order with group_id
4168 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4169 if reversed { cmp.reverse() } else { cmp }
4170 })?;
4171 iterators[next_ix]
4172 .next()
4173 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4174 diagnostic,
4175 range: FromAnchor::from_anchor(&range.start, self)
4176 ..FromAnchor::from_anchor(&range.end, self),
4177 })
4178 })
4179 }
4180
4181 /// Returns all the diagnostic groups associated with the given
4182 /// language server ID. If no language server ID is provided,
4183 /// all diagnostic groups are returned.
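    ///
    /// An illustrative sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`): list the primary message of every group,
    /// across all language servers.
    ///
    /// ```ignore
    /// fn primary_messages(snapshot: &BufferSnapshot) -> Vec<String> {
    ///     snapshot
    ///         .diagnostic_groups(None)
    ///         .into_iter()
    ///         .map(|(_server, group)| group.entries[group.primary_ix].diagnostic.message.clone())
    ///         .collect()
    /// }
    /// ```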
4184 pub fn diagnostic_groups(
4185 &self,
4186 language_server_id: Option<LanguageServerId>,
4187 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4188 let mut groups = Vec::new();
4189
4190 if let Some(language_server_id) = language_server_id {
4191 if let Ok(ix) = self
4192 .diagnostics
4193 .binary_search_by_key(&language_server_id, |e| e.0)
4194 {
4195 self.diagnostics[ix]
4196 .1
4197 .groups(language_server_id, &mut groups, self);
4198 }
4199 } else {
4200 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4201 diagnostics.groups(*language_server_id, &mut groups, self);
4202 }
4203 }
4204
4205 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4206 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4207 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4208 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4209 });
4210
4211 groups
4212 }
4213
4214 /// Returns an iterator over the diagnostics for the given group.
4215 pub fn diagnostic_group<O>(
4216 &self,
4217 group_id: usize,
4218 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4219 where
4220 O: FromAnchor + 'static,
4221 {
4222 self.diagnostics
4223 .iter()
4224 .flat_map(move |(_, set)| set.group(group_id, self))
4225 }
4226
4227 /// An integer version number that accounts for all updates besides
4228 /// the buffer's text itself (which is versioned via a version vector).
4229 pub fn non_text_state_update_count(&self) -> usize {
4230 self.non_text_state_update_count
4231 }
4232
4233 /// Returns a snapshot of the underlying file.
4234 pub fn file(&self) -> Option<&Arc<dyn File>> {
4235 self.file.as_ref()
4236 }
4237
4238 /// Resolves the path of the underlying file: relative to the worktree root by default, or prefixed with the worktree root's name when `include_root` is true.
4239 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4240 if let Some(file) = self.file() {
4241 if file.path().file_name().is_none() || include_root {
4242 Some(file.full_path(cx))
4243 } else {
4244 Some(file.path().to_path_buf())
4245 }
4246 } else {
4247 None
4248 }
4249 }
4250
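    /// Collects the distinct words within `query.range`, mapped to their
    /// anchor ranges, optionally filtered by the fuzzy string in `query`.
    ///
    /// A small sketch (marked `ignore`; assumes a parsed
    /// `snapshot: &BufferSnapshot`):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, range) in &words {
    ///     // Every `word` contains 'c', 'f', 'g' in order (case-insensitively),
    ///     // and `range` is its anchor range within the buffer.
    /// }
    /// ```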
4251 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4252 let query_str = query.fuzzy_contents;
4253 if query_str.map_or(false, |query| query.is_empty()) {
4254 return BTreeMap::default();
4255 }
4256
4257 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4258 language,
4259 override_id: None,
4260 }));
4261
4262 let mut query_ix = 0;
4263 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4264 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4265
4266 let mut words = BTreeMap::default();
4267 let mut current_word_start_ix = None;
4268 let mut chunk_ix = query.range.start;
4269 for chunk in self.chunks(query.range, false) {
4270 for (i, c) in chunk.text.char_indices() {
4271 let ix = chunk_ix + i;
4272 if classifier.is_word(c) {
4273 if current_word_start_ix.is_none() {
4274 current_word_start_ix = Some(ix);
4275 }
4276
4277 if let Some(query_chars) = &query_chars {
4278 if query_ix < query_len {
4279 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4280 query_ix += 1;
4281 }
4282 }
4283 }
4284 continue;
4285 } else if let Some(word_start) = current_word_start_ix.take() {
4286 if query_ix == query_len {
4287 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4288 let mut word_text = self.text_for_range(word_start..ix).peekable();
4289 let first_char = word_text
4290 .peek()
4291 .and_then(|first_chunk| first_chunk.chars().next());
4292 // As a heuristic to reduce useless completions, optionally skip words that start with a digit.
4293 if !query.skip_digits
4294 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4295 {
4296 words.insert(word_text.collect(), word_range);
4297 }
4298 }
4299 }
4300 query_ix = 0;
4301 }
4302 chunk_ix += chunk.text.len();
4303 }
4304
4305 words
4306 }
4307}
4308
4309pub struct WordsQuery<'a> {
4310 /// Only return words that contain every character of this fuzzy string, in order (case-insensitively).
4311 pub fuzzy_contents: Option<&'a str>,
4312 /// Skips words that start with a digit.
4313 pub skip_digits: bool,
4314 /// The buffer offset range in which to look for words.
4315 pub range: Range<usize>,
4316}
4317
4318fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4319 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4320}
4321
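/// Measures the run of leading whitespace in `text`, counting characters and
/// taking the indent kind (spaces vs. tabs) from the first whitespace
/// character.
///
/// For example (illustrative; this function is private, so the snippet is not
/// compiled as a doctest): `indent_size_for_text("  \tfn".chars())` yields an
/// [`IndentSize`] with `len == 3` and `kind == IndentKind::Space`.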
4322fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4323 let mut result = IndentSize::spaces(0);
4324 for c in text {
4325 let kind = match c {
4326 ' ' => IndentKind::Space,
4327 '\t' => IndentKind::Tab,
4328 _ => break,
4329 };
4330 if result.len == 0 {
4331 result.kind = kind;
4332 }
4333 result.len += 1;
4334 }
4335 result
4336}
4337
4338impl Clone for BufferSnapshot {
4339 fn clone(&self) -> Self {
4340 Self {
4341 text: self.text.clone(),
4342 syntax: self.syntax.clone(),
4343 file: self.file.clone(),
4344 remote_selections: self.remote_selections.clone(),
4345 diagnostics: self.diagnostics.clone(),
4346 language: self.language.clone(),
4347 non_text_state_update_count: self.non_text_state_update_count,
4348 }
4349 }
4350}
4351
4352impl Deref for BufferSnapshot {
4353 type Target = text::BufferSnapshot;
4354
4355 fn deref(&self) -> &Self::Target {
4356 &self.text
4357 }
4358}
4359
4360unsafe impl Send for BufferChunks<'_> {}
4361
4362impl<'a> BufferChunks<'a> {
4363 pub(crate) fn new(
4364 text: &'a Rope,
4365 range: Range<usize>,
4366 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4367 diagnostics: bool,
4368 buffer_snapshot: Option<&'a BufferSnapshot>,
4369 ) -> Self {
4370 let mut highlights = None;
4371 if let Some((captures, highlight_maps)) = syntax {
4372 highlights = Some(BufferChunkHighlights {
4373 captures,
4374 next_capture: None,
4375 stack: Default::default(),
4376 highlight_maps,
4377 })
4378 }
4379
4380 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4381 let chunks = text.chunks_in_range(range.clone());
4382
4383 let mut this = BufferChunks {
4384 range,
4385 buffer_snapshot,
4386 chunks,
4387 diagnostic_endpoints,
4388 error_depth: 0,
4389 warning_depth: 0,
4390 information_depth: 0,
4391 hint_depth: 0,
4392 unnecessary_depth: 0,
4393 highlights,
4394 };
4395 this.initialize_diagnostic_endpoints();
4396 this
4397 }
4398
4399 /// Seeks to the given byte range in the buffer.
4400 pub fn seek(&mut self, range: Range<usize>) {
4401 let old_range = std::mem::replace(&mut self.range, range.clone());
4402 self.chunks.set_range(self.range.clone());
4403 if let Some(highlights) = self.highlights.as_mut() {
4404 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4405 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4406 highlights
4407 .stack
4408 .retain(|(end_offset, _)| *end_offset > range.start);
4409 if let Some(capture) = &highlights.next_capture {
4410 if range.start >= capture.node.start_byte() {
4411 let next_capture_end = capture.node.end_byte();
4412 if range.start < next_capture_end {
4413 highlights.stack.push((
4414 next_capture_end,
4415 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4416 ));
4417 }
4418 highlights.next_capture.take();
4419 }
4420 }
4421 } else if let Some(snapshot) = self.buffer_snapshot {
4422 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4423 *highlights = BufferChunkHighlights {
4424 captures,
4425 next_capture: None,
4426 stack: Default::default(),
4427 highlight_maps,
4428 };
4429 } else {
4430 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4431 // Seeking such BufferChunks is not supported.
4432 debug_assert!(
4433 false,
4434 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4435 );
4436 }
4437
4438 highlights.captures.set_byte_range(self.range.clone());
4439 self.initialize_diagnostic_endpoints();
4440 }
4441 }
4442
4443 fn initialize_diagnostic_endpoints(&mut self) {
4444 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4445 if let Some(buffer) = self.buffer_snapshot {
4446 let mut diagnostic_endpoints = Vec::new();
4447 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4448 diagnostic_endpoints.push(DiagnosticEndpoint {
4449 offset: entry.range.start,
4450 is_start: true,
4451 severity: entry.diagnostic.severity,
4452 is_unnecessary: entry.diagnostic.is_unnecessary,
4453 });
4454 diagnostic_endpoints.push(DiagnosticEndpoint {
4455 offset: entry.range.end,
4456 is_start: false,
4457 severity: entry.diagnostic.severity,
4458 is_unnecessary: entry.diagnostic.is_unnecessary,
4459 });
4460 }
4461 diagnostic_endpoints
4462 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4463 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4464 self.hint_depth = 0;
4465 self.error_depth = 0;
4466 self.warning_depth = 0;
4467 self.information_depth = 0;
4468 }
4469 }
4470 }
4471
4472 /// The current byte offset in the buffer.
4473 pub fn offset(&self) -> usize {
4474 self.range.start
4475 }
4476
4477 pub fn range(&self) -> Range<usize> {
4478 self.range.clone()
4479 }
4480
4481 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4482 let depth = match endpoint.severity {
4483 DiagnosticSeverity::ERROR => &mut self.error_depth,
4484 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4485 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4486 DiagnosticSeverity::HINT => &mut self.hint_depth,
4487 _ => return,
4488 };
4489 if endpoint.is_start {
4490 *depth += 1;
4491 } else {
4492 *depth -= 1;
4493 }
4494
4495 if endpoint.is_unnecessary {
4496 if endpoint.is_start {
4497 self.unnecessary_depth += 1;
4498 } else {
4499 self.unnecessary_depth -= 1;
4500 }
4501 }
4502 }
4503
4504 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4505 if self.error_depth > 0 {
4506 Some(DiagnosticSeverity::ERROR)
4507 } else if self.warning_depth > 0 {
4508 Some(DiagnosticSeverity::WARNING)
4509 } else if self.information_depth > 0 {
4510 Some(DiagnosticSeverity::INFORMATION)
4511 } else if self.hint_depth > 0 {
4512 Some(DiagnosticSeverity::HINT)
4513 } else {
4514 None
4515 }
4516 }
4517
4518 fn current_code_is_unnecessary(&self) -> bool {
4519 self.unnecessary_depth > 0
4520 }
4521}
4522
4523impl<'a> Iterator for BufferChunks<'a> {
4524 type Item = Chunk<'a>;
4525
4526 fn next(&mut self) -> Option<Self::Item> {
4527 let mut next_capture_start = usize::MAX;
4528 let mut next_diagnostic_endpoint = usize::MAX;
4529
4530 if let Some(highlights) = self.highlights.as_mut() {
4531 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4532 if *parent_capture_end <= self.range.start {
4533 highlights.stack.pop();
4534 } else {
4535 break;
4536 }
4537 }
4538
4539 if highlights.next_capture.is_none() {
4540 highlights.next_capture = highlights.captures.next();
4541 }
4542
4543 while let Some(capture) = highlights.next_capture.as_ref() {
4544 if self.range.start < capture.node.start_byte() {
4545 next_capture_start = capture.node.start_byte();
4546 break;
4547 } else {
4548 let highlight_id =
4549 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4550 highlights
4551 .stack
4552 .push((capture.node.end_byte(), highlight_id));
4553 highlights.next_capture = highlights.captures.next();
4554 }
4555 }
4556 }
4557
4558 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4559 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4560 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4561 if endpoint.offset <= self.range.start {
4562 self.update_diagnostic_depths(endpoint);
4563 diagnostic_endpoints.next();
4564 } else {
4565 next_diagnostic_endpoint = endpoint.offset;
4566 break;
4567 }
4568 }
4569 }
4570 self.diagnostic_endpoints = diagnostic_endpoints;
4571
4572 if let Some(chunk) = self.chunks.peek() {
4573 let chunk_start = self.range.start;
4574 let mut chunk_end = (self.chunks.offset() + chunk.len())
4575 .min(next_capture_start)
4576 .min(next_diagnostic_endpoint);
4577 let mut highlight_id = None;
4578 if let Some(highlights) = self.highlights.as_ref() {
4579 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4580 chunk_end = chunk_end.min(*parent_capture_end);
4581 highlight_id = Some(*parent_highlight_id);
4582 }
4583 }
4584
4585 let slice =
4586 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4587 self.range.start = chunk_end;
4588 if self.range.start == self.chunks.offset() + chunk.len() {
4589 self.chunks.next().unwrap();
4590 }
4591
4592 Some(Chunk {
4593 text: slice,
4594 syntax_highlight_id: highlight_id,
4595 diagnostic_severity: self.current_diagnostic_severity(),
4596 is_unnecessary: self.current_code_is_unnecessary(),
4597 ..Default::default()
4598 })
4599 } else {
4600 None
4601 }
4602 }
4603}
4604
4605impl operation_queue::Operation for Operation {
4606 fn lamport_timestamp(&self) -> clock::Lamport {
4607 match self {
4608 Operation::Buffer(_) => {
4609 unreachable!("buffer operations should never be deferred at this layer")
4610 }
4611 Operation::UpdateDiagnostics {
4612 lamport_timestamp, ..
4613 }
4614 | Operation::UpdateSelections {
4615 lamport_timestamp, ..
4616 }
4617 | Operation::UpdateCompletionTriggers {
4618 lamport_timestamp, ..
4619 } => *lamport_timestamp,
4620 }
4621 }
4622}
4623
4624impl Default for Diagnostic {
4625 fn default() -> Self {
4626 Self {
4627 source: Default::default(),
4628 code: None,
4629 code_description: None,
4630 severity: DiagnosticSeverity::ERROR,
4631 message: Default::default(),
4632 markdown: None,
4633 group_id: 0,
4634 is_primary: false,
4635 is_disk_based: false,
4636 is_unnecessary: false,
4637 data: None,
4638 }
4639 }
4640}
4641
4642impl IndentSize {
4643 /// Returns an [`IndentSize`] representing the given number of spaces.
4644 pub fn spaces(len: u32) -> Self {
4645 Self {
4646 len,
4647 kind: IndentKind::Space,
4648 }
4649 }
4650
4651 /// Returns an [`IndentSize`] representing a tab.
4652 pub fn tab() -> Self {
4653 Self {
4654 len: 1,
4655 kind: IndentKind::Tab,
4656 }
4657 }
4658
4659 /// An iterator over the characters represented by this [`IndentSize`].
4660 pub fn chars(&self) -> impl Iterator<Item = char> {
4661 iter::repeat(self.char()).take(self.len as usize)
4662 }
4663
4664 /// The character representation of this [`IndentSize`].
4665 pub fn char(&self) -> char {
4666 match self.kind {
4667 IndentKind::Space => ' ',
4668 IndentKind::Tab => '\t',
4669 }
4670 }
4671
4672 /// Consumes the current [`IndentSize`] and returns a new one that has
4673 /// been shrunk or enlarged by the given size along the given direction.
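    ///
    /// For example (illustrative): shrinking four spaces by two spaces via
    /// `IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2))`
    /// yields an indent of two spaces, while growing a zero-length indent
    /// adopts the kind of the added size.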
4674 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4675 match direction {
4676 Ordering::Less => {
4677 if self.kind == size.kind && self.len >= size.len {
4678 self.len -= size.len;
4679 }
4680 }
4681 Ordering::Equal => {}
4682 Ordering::Greater => {
4683 if self.len == 0 {
4684 self = size;
4685 } else if self.kind == size.kind {
4686 self.len += size.len;
4687 }
4688 }
4689 }
4690 self
4691 }
4692
4693 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4694 match self.kind {
4695 IndentKind::Space => self.len as usize,
4696 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4697 }
4698 }
4699}
4700
4701#[cfg(any(test, feature = "test-support"))]
4702pub struct TestFile {
4703 pub path: Arc<Path>,
4704 pub root_name: String,
4705 pub local_root: Option<PathBuf>,
4706}
4707
4708#[cfg(any(test, feature = "test-support"))]
4709impl File for TestFile {
4710 fn path(&self) -> &Arc<Path> {
4711 &self.path
4712 }
4713
4714 fn full_path(&self, _: &gpui::App) -> PathBuf {
4715 PathBuf::from(&self.root_name).join(self.path.as_ref())
4716 }
4717
4718 fn as_local(&self) -> Option<&dyn LocalFile> {
4719 if self.local_root.is_some() {
4720 Some(self)
4721 } else {
4722 None
4723 }
4724 }
4725
4726 fn disk_state(&self) -> DiskState {
4727 unimplemented!()
4728 }
4729
4730 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4731 self.path().file_name().unwrap_or(self.root_name.as_ref())
4732 }
4733
4734 fn worktree_id(&self, _: &App) -> WorktreeId {
4735 WorktreeId::from_usize(0)
4736 }
4737
4738 fn to_proto(&self, _: &App) -> rpc::proto::File {
4739 unimplemented!()
4740 }
4741
4742 fn is_private(&self) -> bool {
4743 false
4744 }
4745}
4746
4747#[cfg(any(test, feature = "test-support"))]
4748impl LocalFile for TestFile {
4749 fn abs_path(&self, _cx: &App) -> PathBuf {
4750 PathBuf::from(self.local_root.as_ref().unwrap())
4751 .join(&self.root_name)
4752 .join(self.path.as_ref())
4753 }
4754
4755 fn load(&self, _cx: &App) -> Task<Result<String>> {
4756 unimplemented!()
4757 }
4758
4759 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4760 unimplemented!()
4761 }
4762}
4763
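/// Coalesces an ascending sequence of row numbers into contiguous ranges,
/// splitting any range that would exceed `max_len` rows.
///
/// For example (illustrative; this function is crate-private, so the snippet
/// is not compiled as a doctest): `contiguous_ranges([1, 2, 3, 5].into_iter(), 100)`
/// yields `1..4` followed by `5..6`.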
4764pub(crate) fn contiguous_ranges(
4765 values: impl Iterator<Item = u32>,
4766 max_len: usize,
4767) -> impl Iterator<Item = Range<u32>> {
4768 let mut values = values;
4769 let mut current_range: Option<Range<u32>> = None;
4770 std::iter::from_fn(move || {
4771 loop {
4772 if let Some(value) = values.next() {
4773 if let Some(range) = &mut current_range {
4774 if value == range.end && range.len() < max_len {
4775 range.end += 1;
4776 continue;
4777 }
4778 }
4779
4780 let prev_range = current_range.clone();
4781 current_range = Some(value..(value + 1));
4782 if prev_range.is_some() {
4783 return prev_range;
4784 }
4785 } else {
4786 return current_range.take();
4787 }
4788 }
4789 })
4790}
4791
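/// Classifies characters as word, whitespace, or punctuation characters,
/// taking any language-specific word characters of the given
/// [`LanguageScope`] into account.
///
/// A small sketch (marked `ignore`, since it relies on this crate's
/// `CharKind` deriving `Debug` and `PartialEq`):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('_'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// // With `ignore_punctuation(true)`, punctuation is treated as part of a word.
/// assert_eq!(classifier.ignore_punctuation(true).kind('-'), CharKind::Word);
/// ```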
4792#[derive(Default, Debug)]
4793pub struct CharClassifier {
4794 scope: Option<LanguageScope>,
4795 for_completion: bool,
4796 ignore_punctuation: bool,
4797}
4798
4799impl CharClassifier {
4800 pub fn new(scope: Option<LanguageScope>) -> Self {
4801 Self {
4802 scope,
4803 for_completion: false,
4804 ignore_punctuation: false,
4805 }
4806 }
4807
4808 pub fn for_completion(self, for_completion: bool) -> Self {
4809 Self {
4810 for_completion,
4811 ..self
4812 }
4813 }
4814
4815 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4816 Self {
4817 ignore_punctuation,
4818 ..self
4819 }
4820 }
4821
4822 pub fn is_whitespace(&self, c: char) -> bool {
4823 self.kind(c) == CharKind::Whitespace
4824 }
4825
4826 pub fn is_word(&self, c: char) -> bool {
4827 self.kind(c) == CharKind::Word
4828 }
4829
4830 pub fn is_punctuation(&self, c: char) -> bool {
4831 self.kind(c) == CharKind::Punctuation
4832 }
4833
4834 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4835 if c.is_alphanumeric() || c == '_' {
4836 return CharKind::Word;
4837 }
4838
4839 if let Some(scope) = &self.scope {
4840 let characters = if self.for_completion {
4841 scope.completion_query_characters()
4842 } else {
4843 scope.word_characters()
4844 };
4845 if let Some(characters) = characters {
4846 if characters.contains(&c) {
4847 return CharKind::Word;
4848 }
4849 }
4850 }
4851
4852 if c.is_whitespace() {
4853 return CharKind::Whitespace;
4854 }
4855
4856 if ignore_punctuation {
4857 CharKind::Word
4858 } else {
4859 CharKind::Punctuation
4860 }
4861 }
4862
4863 pub fn kind(&self, c: char) -> CharKind {
4864 self.kind_with(c, self.ignore_punctuation)
4865 }
4866}
4867
4868/// Find all of the ranges of whitespace that occur at the ends of lines
4869/// in the given rope.
4870///
4871/// This could also be done with a regex search, but this implementation
4872/// avoids copying text.
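///
/// A small sketch (marked `ignore`; assumes `Rope: From<&str>`, as used
/// elsewhere in this crate):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \nlet y = 2;\t\n");
/// // One byte range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 23..24]);
/// ```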
4873pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4874 let mut ranges = Vec::new();
4875
4876 let mut offset = 0;
4877 let mut prev_chunk_trailing_whitespace_range = 0..0;
4878 for chunk in rope.chunks() {
4879 let mut prev_line_trailing_whitespace_range = 0..0;
4880 for (i, line) in chunk.split('\n').enumerate() {
4881 let line_end_offset = offset + line.len();
4882 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4883 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4884
4885 if i == 0 && trimmed_line_len == 0 {
4886 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4887 }
4888 if !prev_line_trailing_whitespace_range.is_empty() {
4889 ranges.push(prev_line_trailing_whitespace_range);
4890 }
4891
4892 offset = line_end_offset + 1;
4893 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4894 }
4895
4896 offset -= 1;
4897 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4898 }
4899
4900 if !prev_chunk_trailing_whitespace_range.is_empty() {
4901 ranges.push(prev_chunk_trailing_whitespace_range);
4902 }
4903
4904 ranges
4905}