1pub use crate::{
2 Grammar, Language, LanguageRegistry,
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto,
6};
7use crate::{
8 LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
9 TreeSitterOptions,
10 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
11 language_settings::{LanguageSettings, language_settings},
12 outline::OutlineItem,
13 syntax_map::{
14 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
15 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
16 },
17 task_context::RunnableRange,
18 text_diff::text_diff,
19};
20use anyhow::{Context as _, Result};
21use async_watch as watch;
22pub use clock::ReplicaId;
23use clock::{AGENT_REPLICA_ID, Lamport};
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76/// A label for the background task spawned by the buffer to compute
77/// a diff against the contents of its file.
78pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
79
/// Indicates whether a [`Buffer`] can be edited.
81#[derive(PartialEq, Clone, Copy, Debug)]
82pub enum Capability {
83 /// The buffer is a mutable replica.
84 ReadWrite,
85 /// The buffer is a read-only replica.
86 ReadOnly,
87}
88
89pub type BufferRow = u32;
90
91/// An in-memory representation of a source code file, including its text,
92/// syntax trees, git status, and diagnostics.
93pub struct Buffer {
94 text: TextBuffer,
95 branch_state: Option<BufferBranchState>,
96 /// Filesystem state, `None` when there is no path.
97 file: Option<Arc<dyn File>>,
98 /// The mtime of the file when this buffer was last loaded from
99 /// or saved to disk.
100 saved_mtime: Option<MTime>,
101 /// The version vector when this buffer was last loaded from
102 /// or saved to disk.
103 saved_version: clock::Global,
104 preview_version: clock::Global,
105 transaction_depth: usize,
106 was_dirty_before_starting_transaction: Option<bool>,
107 reload_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 autoindent_requests: Vec<Arc<AutoindentRequest>>,
110 pending_autoindent: Option<Task<()>>,
111 sync_parse_timeout: Duration,
112 syntax_map: Mutex<SyntaxMap>,
113 reparse: Option<Task<()>>,
114 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
115 non_text_state_update_count: usize,
116 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
117 remote_selections: TreeMap<ReplicaId, SelectionSet>,
118 diagnostics_timestamp: clock::Lamport,
119 completion_triggers: BTreeSet<String>,
120 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
121 completion_triggers_timestamp: clock::Lamport,
122 deferred_ops: OperationQueue<Operation>,
123 capability: Capability,
124 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The contents of the cell are `(self.version, has_changes)` as of the last call.
127 has_unsaved_edits: Cell<(clock::Global, bool)>,
128 change_bits: Vec<rc::Weak<Cell<bool>>>,
129 _subscriptions: Vec<gpui::Subscription>,
130}
131
132#[derive(Copy, Clone, Debug, PartialEq, Eq)]
133pub enum ParseStatus {
134 Idle,
135 Parsing,
136}
137
138struct BufferBranchState {
139 base_buffer: Entity<Buffer>,
140 merged_operations: Vec<Lamport>,
141}
142
143/// An immutable, cheaply cloneable representation of a fixed
144/// state of a buffer.
145pub struct BufferSnapshot {
146 pub text: text::BufferSnapshot,
147 pub(crate) syntax: SyntaxSnapshot,
148 file: Option<Arc<dyn File>>,
149 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
150 remote_selections: TreeMap<ReplicaId, SelectionSet>,
151 language: Option<Arc<Language>>,
152 non_text_state_update_count: usize,
153}
154
155/// The kind and amount of indentation in a particular line. For now,
156/// assumes that indentation is all the same character.
157#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
158pub struct IndentSize {
159 /// The number of bytes that comprise the indentation.
160 pub len: u32,
161 /// The kind of whitespace used for indentation.
162 pub kind: IndentKind,
163}
164
165/// A whitespace character that's used for indentation.
166#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
167pub enum IndentKind {
168 /// An ASCII space character.
169 #[default]
170 Space,
171 /// An ASCII tab character.
172 Tab,
173}
174
175/// The shape of a selection cursor.
176#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
177#[serde(rename_all = "snake_case")]
178pub enum CursorShape {
179 /// A vertical bar
180 #[default]
181 Bar,
182 /// A block that surrounds the following character
183 Block,
184 /// An underline that runs along the following character
185 Underline,
186 /// A box drawn around the following character
187 Hollow,
188}
189
190#[derive(Clone, Debug)]
191struct SelectionSet {
192 line_mode: bool,
193 cursor_shape: CursorShape,
194 selections: Arc<[Selection<Anchor>]>,
195 lamport_timestamp: clock::Lamport,
196}
197
198/// A diagnostic associated with a certain range of a buffer.
199#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
200pub struct Diagnostic {
201 /// The name of the service that produced this diagnostic.
202 pub source: Option<String>,
203 /// A machine-readable code that identifies this diagnostic.
204 pub code: Option<NumberOrString>,
205 pub code_description: Option<lsp::Url>,
206 /// Whether this diagnostic is a hint, warning, or error.
207 pub severity: DiagnosticSeverity,
208 /// The human-readable message associated with this diagnostic.
209 pub message: String,
    /// The human-readable message, in Markdown format, if available.
211 pub markdown: Option<String>,
212 /// An id that identifies the group to which this diagnostic belongs.
213 ///
214 /// When a language server produces a diagnostic with
215 /// one or more associated diagnostics, those diagnostics are all
216 /// assigned a single group ID.
217 pub group_id: usize,
218 /// Whether this diagnostic is the primary diagnostic for its group.
219 ///
220 /// In a given group, the primary diagnostic is the top-level diagnostic
221 /// returned by the language server. The non-primary diagnostics are the
222 /// associated diagnostics.
223 pub is_primary: bool,
224 /// Whether this diagnostic is considered to originate from an analysis of
225 /// files on disk, as opposed to any unsaved buffer contents. This is a
226 /// property of a given diagnostic source, and is configured for a given
227 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
228 /// for the language server.
229 pub is_disk_based: bool,
230 /// Whether this diagnostic marks unnecessary code.
231 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed back to
    /// the server when code actions are requested for this diagnostic.
233 pub data: Option<Value>,
234}
235
236/// An operation used to synchronize this buffer with its other replicas.
237#[derive(Clone, Debug, PartialEq)]
238pub enum Operation {
239 /// A text operation.
240 Buffer(text::Operation),
241
242 /// An update to the buffer's diagnostics.
243 UpdateDiagnostics {
244 /// The id of the language server that produced the new diagnostics.
245 server_id: LanguageServerId,
246 /// The diagnostics.
247 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
248 /// The buffer's lamport timestamp.
249 lamport_timestamp: clock::Lamport,
250 },
251
252 /// An update to the most recent selections in this buffer.
253 UpdateSelections {
254 /// The selections.
255 selections: Arc<[Selection<Anchor>]>,
256 /// The buffer's lamport timestamp.
257 lamport_timestamp: clock::Lamport,
258 /// Whether the selections are in 'line mode'.
259 line_mode: bool,
260 /// The [`CursorShape`] associated with these selections.
261 cursor_shape: CursorShape,
262 },
263
264 /// An update to the characters that should trigger autocompletion
265 /// for this buffer.
266 UpdateCompletionTriggers {
267 /// The characters that trigger autocompletion.
268 triggers: Vec<String>,
269 /// The buffer's lamport timestamp.
270 lamport_timestamp: clock::Lamport,
271 /// The language server ID.
272 server_id: LanguageServerId,
273 },
274}
275
276/// An event that occurs in a buffer.
277#[derive(Clone, Debug, PartialEq)]
278pub enum BufferEvent {
279 /// The buffer was changed in a way that must be
280 /// propagated to its other replicas.
281 Operation {
282 operation: Operation,
283 is_local: bool,
284 },
285 /// The buffer was edited.
286 Edited,
287 /// The buffer's `dirty` bit changed.
288 DirtyChanged,
289 /// The buffer was saved.
290 Saved,
291 /// The buffer's file was changed on disk.
292 FileHandleChanged,
293 /// The buffer was reloaded.
294 Reloaded,
    /// The buffer needs to be reloaded.
296 ReloadNeeded,
297 /// The buffer's language was changed.
298 LanguageChanged,
299 /// The buffer's syntax trees were updated.
300 Reparsed,
301 /// The buffer's diagnostics were updated.
302 DiagnosticsUpdated,
303 /// The buffer gained or lost editing capabilities.
304 CapabilityChanged,
305 /// The buffer was explicitly requested to close.
306 Closed,
307 /// The buffer was discarded when closing.
308 Discarded,
309}
310
311/// The file associated with a buffer.
312pub trait File: Send + Sync + Any {
313 /// Returns the [`LocalFile`] associated with this file, if the
314 /// file is local.
315 fn as_local(&self) -> Option<&dyn LocalFile>;
316
317 /// Returns whether this file is local.
318 fn is_local(&self) -> bool {
319 self.as_local().is_some()
320 }
321
322 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
323 /// only available in some states, such as modification time.
324 fn disk_state(&self) -> DiskState;
325
326 /// Returns the path of this file relative to the worktree's root directory.
327 fn path(&self) -> &Arc<Path>;
328
329 /// Returns the path of this file relative to the worktree's parent directory (this means it
330 /// includes the name of the worktree's root folder).
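    ///
    /// For example (hypothetical paths): a file at the worktree-relative path `src/main.rs`,
    /// in a worktree whose root folder is named `zed`, has the full path `zed/src/main.rs`.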
331 fn full_path(&self, cx: &App) -> PathBuf;
332
333 /// Returns the last component of this handle's absolute path. If this handle refers to the root
334 /// of its worktree, then this method will return the name of the worktree itself.
335 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
336
337 /// Returns the id of the worktree to which this file belongs.
338 ///
339 /// This is needed for looking up project-specific settings.
340 fn worktree_id(&self, cx: &App) -> WorktreeId;
341
342 /// Converts this file into a protobuf message.
343 fn to_proto(&self, cx: &App) -> rpc::proto::File;
344
    /// Returns whether Zed considers this to be a private file.
346 fn is_private(&self) -> bool;
347}
348
349/// The file's storage status - whether it's stored (`Present`), and if so when it was last
350/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
351/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
352/// indicator for new files.
353#[derive(Copy, Clone, Debug, PartialEq)]
354pub enum DiskState {
355 /// File created in Zed that has not been saved.
356 New,
357 /// File present on the filesystem.
358 Present { mtime: MTime },
359 /// Deleted file that was previously present.
360 Deleted,
361}
362
363impl DiskState {
364 /// Returns the file's last known modification time on disk.
365 pub fn mtime(self) -> Option<MTime> {
366 match self {
367 DiskState::New => None,
368 DiskState::Present { mtime } => Some(mtime),
369 DiskState::Deleted => None,
370 }
371 }
372
373 pub fn exists(&self) -> bool {
374 match self {
375 DiskState::New => false,
376 DiskState::Present { .. } => true,
377 DiskState::Deleted => false,
378 }
379 }
380}
381
382/// The file associated with a buffer, in the case where the file is on the local disk.
383pub trait LocalFile: File {
    /// Returns the absolute path of this file.
385 fn abs_path(&self, cx: &App) -> PathBuf;
386
387 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
388 fn load(&self, cx: &App) -> Task<Result<String>>;
389
390 /// Loads the file's contents from disk.
391 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
392}
393
394/// The auto-indent behavior associated with an editing operation.
395/// For some editing operations, each affected line of text has its
396/// indentation recomputed. For other operations, the entire block
397/// of edited text is adjusted uniformly.
398#[derive(Clone, Debug)]
399pub enum AutoindentMode {
400 /// Indent each line of inserted text.
401 EachLine,
402 /// Apply the same indentation adjustment to all of the lines
403 /// in a given insertion.
404 Block {
405 /// The original indentation column of the first line of each
406 /// insertion, if it has been copied.
407 ///
408 /// Knowing this makes it possible to preserve the relative indentation
409 /// of every line in the insertion from when it was copied.
410 ///
        /// If the original indent column is `a`, and the first line of the insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
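        ///
        /// For example (hypothetical values): if a copied block's first line was at
        /// column 4 and its second line at column 8, and the first line is auto-indented
        /// to column 8 on insertion, the adjustment is `8 - 4 = 4`, so the second line
        /// ends up at column 12.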
414 original_indent_columns: Vec<Option<u32>>,
415 },
416}
417
418#[derive(Clone)]
419struct AutoindentRequest {
420 before_edit: BufferSnapshot,
421 entries: Vec<AutoindentRequestEntry>,
422 is_block_mode: bool,
423 ignore_empty_lines: bool,
424}
425
426#[derive(Debug, Clone)]
427struct AutoindentRequestEntry {
428 /// A range of the buffer whose indentation should be adjusted.
429 range: Range<Anchor>,
430 /// Whether or not these lines should be considered brand new, for the
431 /// purpose of auto-indent. When text is not new, its indentation will
432 /// only be adjusted if the suggested indentation level has *changed*
433 /// since the edit was made.
434 first_line_is_new: bool,
435 indent_size: IndentSize,
436 original_indent_column: Option<u32>,
437}
438
439#[derive(Debug)]
440struct IndentSuggestion {
441 basis_row: u32,
442 delta: Ordering,
443 within_error: bool,
444}
445
446struct BufferChunkHighlights<'a> {
447 captures: SyntaxMapCaptures<'a>,
448 next_capture: Option<SyntaxMapCapture<'a>>,
449 stack: Vec<(usize, HighlightId)>,
450 highlight_maps: Vec<HighlightMap>,
451}
452
453/// An iterator that yields chunks of a buffer's text, along with their
454/// syntax highlights and diagnostic status.
455pub struct BufferChunks<'a> {
456 buffer_snapshot: Option<&'a BufferSnapshot>,
457 range: Range<usize>,
458 chunks: text::Chunks<'a>,
459 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
460 error_depth: usize,
461 warning_depth: usize,
462 information_depth: usize,
463 hint_depth: usize,
464 unnecessary_depth: usize,
465 highlights: Option<BufferChunkHighlights<'a>>,
466}
467
468/// A chunk of a buffer's text, along with its syntax highlight and
469/// diagnostic status.
470#[derive(Clone, Debug, Default)]
471pub struct Chunk<'a> {
472 /// The text of the chunk.
473 pub text: &'a str,
474 /// The syntax highlighting style of the chunk.
475 pub syntax_highlight_id: Option<HighlightId>,
476 /// The highlight style that has been applied to this chunk in
477 /// the editor.
478 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
480 pub diagnostic_severity: Option<DiagnosticSeverity>,
481 /// Whether this chunk of text is marked as unnecessary.
482 pub is_unnecessary: bool,
483 /// Whether this chunk of text was originally a tab character.
484 pub is_tab: bool,
485}
486
487/// A set of edits to a given version of a buffer, computed asynchronously.
488#[derive(Debug)]
489pub struct Diff {
490 pub base_version: clock::Global,
491 pub line_ending: LineEnding,
492 pub edits: Vec<(Range<usize>, Arc<str>)>,
493}
494
495#[derive(Debug, Clone, Copy)]
496pub(crate) struct DiagnosticEndpoint {
497 offset: usize,
498 is_start: bool,
499 severity: DiagnosticSeverity,
500 is_unnecessary: bool,
501}
502
503/// A class of characters, used for characterizing a run of text.
504#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
505pub enum CharKind {
506 /// Whitespace.
507 Whitespace,
508 /// Punctuation.
509 Punctuation,
510 /// Word.
511 Word,
512}
513
/// A runnable is a set of data about a region that could be resolved into a task.
515pub struct Runnable {
516 pub tags: SmallVec<[RunnableTag; 1]>,
517 pub language: Arc<Language>,
518 pub buffer: BufferId,
519}
520
521#[derive(Default, Clone, Debug)]
522pub struct HighlightedText {
523 pub text: SharedString,
524 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
525}
526
527#[derive(Default, Debug)]
528struct HighlightedTextBuilder {
529 pub text: String,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533impl HighlightedText {
534 pub fn from_buffer_range<T: ToOffset>(
535 range: Range<T>,
536 snapshot: &text::BufferSnapshot,
537 syntax_snapshot: &SyntaxSnapshot,
538 override_style: Option<HighlightStyle>,
539 syntax_theme: &SyntaxTheme,
540 ) -> Self {
541 let mut highlighted_text = HighlightedTextBuilder::default();
542 highlighted_text.add_text_from_buffer_range(
543 range,
544 snapshot,
545 syntax_snapshot,
546 override_style,
547 syntax_theme,
548 );
549 highlighted_text.build()
550 }
551
552 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
553 gpui::StyledText::new(self.text.clone())
554 .with_default_highlights(default_style, self.highlights.iter().cloned())
555 }
556
    /// Returns the first line, with leading whitespace trimmed unless a highlight starts
    /// within it, along with a boolean indicating whether more lines follow.
559 pub fn first_line_preview(self) -> (Self, bool) {
560 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
561 let first_line = &self.text[..newline_ix];
562
563 // Trim leading whitespace, unless an edit starts prior to it.
564 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
565 if let Some((first_highlight_range, _)) = self.highlights.first() {
566 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
567 }
568
569 let preview_text = &first_line[preview_start_ix..];
570 let preview_highlights = self
571 .highlights
572 .into_iter()
573 .take_while(|(range, _)| range.start < newline_ix)
574 .filter_map(|(mut range, highlight)| {
575 range.start = range.start.saturating_sub(preview_start_ix);
576 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
577 if range.is_empty() {
578 None
579 } else {
580 Some((range, highlight))
581 }
582 });
583
584 let preview = Self {
585 text: SharedString::new(preview_text),
586 highlights: preview_highlights.collect(),
587 };
588
589 (preview, self.text.len() > newline_ix)
590 }
591}
592
593impl HighlightedTextBuilder {
594 pub fn build(self) -> HighlightedText {
595 HighlightedText {
596 text: self.text.into(),
597 highlights: self.highlights,
598 }
599 }
600
601 pub fn add_text_from_buffer_range<T: ToOffset>(
602 &mut self,
603 range: Range<T>,
604 snapshot: &text::BufferSnapshot,
605 syntax_snapshot: &SyntaxSnapshot,
606 override_style: Option<HighlightStyle>,
607 syntax_theme: &SyntaxTheme,
608 ) {
609 let range = range.to_offset(snapshot);
610 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
611 let start = self.text.len();
612 self.text.push_str(chunk.text);
613 let end = self.text.len();
614
615 if let Some(mut highlight_style) = chunk
616 .syntax_highlight_id
617 .and_then(|id| id.style(syntax_theme))
618 {
619 if let Some(override_style) = override_style {
620 highlight_style.highlight(override_style);
621 }
622 self.highlights.push((start..end, highlight_style));
623 } else if let Some(override_style) = override_style {
624 self.highlights.push((start..end, override_style));
625 }
626 }
627 }
628
629 fn highlighted_chunks<'a>(
630 range: Range<usize>,
631 snapshot: &'a text::BufferSnapshot,
632 syntax_snapshot: &'a SyntaxSnapshot,
633 ) -> BufferChunks<'a> {
634 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
635 grammar.highlights_query.as_ref()
636 });
637
638 let highlight_maps = captures
639 .grammars()
640 .iter()
641 .map(|grammar| grammar.highlight_map())
642 .collect();
643
644 BufferChunks::new(
645 snapshot.as_rope(),
646 range,
647 Some((captures, highlight_maps)),
648 false,
649 None,
650 )
651 }
652}
653
654#[derive(Clone)]
655pub struct EditPreview {
656 old_snapshot: text::BufferSnapshot,
657 applied_edits_snapshot: text::BufferSnapshot,
658 syntax_snapshot: SyntaxSnapshot,
659}
660
661impl EditPreview {
662 pub fn highlight_edits(
663 &self,
664 current_snapshot: &BufferSnapshot,
665 edits: &[(Range<Anchor>, String)],
666 include_deletions: bool,
667 cx: &App,
668 ) -> HighlightedText {
669 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
670 return HighlightedText::default();
671 };
672
673 let mut highlighted_text = HighlightedTextBuilder::default();
674
675 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
676
677 let insertion_highlight_style = HighlightStyle {
678 background_color: Some(cx.theme().status().created_background),
679 ..Default::default()
680 };
681 let deletion_highlight_style = HighlightStyle {
682 background_color: Some(cx.theme().status().deleted_background),
683 ..Default::default()
684 };
685 let syntax_theme = cx.theme().syntax();
686
687 for (range, edit_text) in edits {
688 let edit_new_end_in_preview_snapshot = range
689 .end
690 .bias_right(&self.old_snapshot)
691 .to_offset(&self.applied_edits_snapshot);
692 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
693
694 let unchanged_range_in_preview_snapshot =
695 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
696 if !unchanged_range_in_preview_snapshot.is_empty() {
697 highlighted_text.add_text_from_buffer_range(
698 unchanged_range_in_preview_snapshot,
699 &self.applied_edits_snapshot,
700 &self.syntax_snapshot,
701 None,
702 &syntax_theme,
703 );
704 }
705
706 let range_in_current_snapshot = range.to_offset(current_snapshot);
707 if include_deletions && !range_in_current_snapshot.is_empty() {
708 highlighted_text.add_text_from_buffer_range(
709 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
712 Some(deletion_highlight_style),
713 &syntax_theme,
714 );
715 }
716
717 if !edit_text.is_empty() {
718 highlighted_text.add_text_from_buffer_range(
719 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
720 &self.applied_edits_snapshot,
721 &self.syntax_snapshot,
722 Some(insertion_highlight_style),
723 &syntax_theme,
724 );
725 }
726
727 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
728 }
729
730 highlighted_text.add_text_from_buffer_range(
731 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
732 &self.applied_edits_snapshot,
733 &self.syntax_snapshot,
734 None,
735 &syntax_theme,
736 );
737
738 highlighted_text.build()
739 }
740
741 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
742 let (first, _) = edits.first()?;
743 let (last, _) = edits.last()?;
744
745 let start = first
746 .start
747 .bias_left(&self.old_snapshot)
748 .to_point(&self.applied_edits_snapshot);
749 let end = last
750 .end
751 .bias_right(&self.old_snapshot)
752 .to_point(&self.applied_edits_snapshot);
753
754 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
755 let range = Point::new(start.row, 0)
756 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
757
758 Some(range.to_offset(&self.applied_edits_snapshot))
759 }
760}
761
762#[derive(Clone, Debug, PartialEq, Eq)]
763pub struct BracketMatch {
764 pub open_range: Range<usize>,
765 pub close_range: Range<usize>,
766 pub newline_only: bool,
767}
768
769impl Buffer {
770 /// Create a new buffer with the given base text.
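    ///
    /// A minimal sketch (hypothetical surrounding context):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx));
    /// ```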
771 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
772 Self::build(
773 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
774 None,
775 Capability::ReadWrite,
776 )
777 }
778
779 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
780 pub fn local_normalized(
781 base_text_normalized: Rope,
782 line_ending: LineEnding,
783 cx: &Context<Self>,
784 ) -> Self {
785 Self::build(
786 TextBuffer::new_normalized(
787 0,
788 cx.entity_id().as_non_zero_u64().into(),
789 line_ending,
790 base_text_normalized,
791 ),
792 None,
793 Capability::ReadWrite,
794 )
795 }
796
797 /// Create a new buffer that is a replica of a remote buffer.
798 pub fn remote(
799 remote_id: BufferId,
800 replica_id: ReplicaId,
801 capability: Capability,
802 base_text: impl Into<String>,
803 ) -> Self {
804 Self::build(
805 TextBuffer::new(replica_id, remote_id, base_text.into()),
806 None,
807 capability,
808 )
809 }
810
811 /// Create a new buffer that is a replica of a remote buffer, populating its
812 /// state from the given protobuf message.
813 pub fn from_proto(
814 replica_id: ReplicaId,
815 capability: Capability,
816 message: proto::BufferState,
817 file: Option<Arc<dyn File>>,
818 ) -> Result<Self> {
819 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
820 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
821 let mut this = Self::build(buffer, file, capability);
822 this.text.set_line_ending(proto::deserialize_line_ending(
823 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
824 ));
825 this.saved_version = proto::deserialize_version(&message.saved_version);
826 this.saved_mtime = message.saved_mtime.map(|time| time.into());
827 Ok(this)
828 }
829
830 /// Serialize the buffer's state to a protobuf message.
831 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
832 proto::BufferState {
833 id: self.remote_id().into(),
834 file: self.file.as_ref().map(|f| f.to_proto(cx)),
835 base_text: self.base_text().to_string(),
836 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
837 saved_version: proto::serialize_version(&self.saved_version),
838 saved_mtime: self.saved_mtime.map(|time| time.into()),
839 }
840 }
841
842 /// Serialize as protobufs all of the changes to the buffer since the given version.
843 pub fn serialize_ops(
844 &self,
845 since: Option<clock::Global>,
846 cx: &App,
847 ) -> Task<Vec<proto::Operation>> {
848 let mut operations = Vec::new();
849 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
850
851 operations.extend(self.remote_selections.iter().map(|(_, set)| {
852 proto::serialize_operation(&Operation::UpdateSelections {
853 selections: set.selections.clone(),
854 lamport_timestamp: set.lamport_timestamp,
855 line_mode: set.line_mode,
856 cursor_shape: set.cursor_shape,
857 })
858 }));
859
860 for (server_id, diagnostics) in &self.diagnostics {
861 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
862 lamport_timestamp: self.diagnostics_timestamp,
863 server_id: *server_id,
864 diagnostics: diagnostics.iter().cloned().collect(),
865 }));
866 }
867
868 for (server_id, completions) in &self.completion_triggers_per_language_server {
869 operations.push(proto::serialize_operation(
870 &Operation::UpdateCompletionTriggers {
871 triggers: completions.iter().cloned().collect(),
872 lamport_timestamp: self.completion_triggers_timestamp,
873 server_id: *server_id,
874 },
875 ));
876 }
877
878 let text_operations = self.text.operations().clone();
879 cx.background_spawn(async move {
880 let since = since.unwrap_or_default();
881 operations.extend(
882 text_operations
883 .iter()
884 .filter(|(_, op)| !since.observed(op.timestamp()))
885 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
886 );
887 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
888 operations
889 })
890 }
891
892 /// Assign a language to the buffer, returning the buffer.
893 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
894 self.set_language(Some(language), cx);
895 self
896 }
897
898 /// Returns the [`Capability`] of this buffer.
899 pub fn capability(&self) -> Capability {
900 self.capability
901 }
902
903 /// Whether this buffer can only be read.
904 pub fn read_only(&self) -> bool {
905 self.capability == Capability::ReadOnly
906 }
907
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
909 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
910 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
911 let snapshot = buffer.snapshot();
912 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
913 Self {
914 saved_mtime,
915 saved_version: buffer.version(),
916 preview_version: buffer.version(),
917 reload_task: None,
918 transaction_depth: 0,
919 was_dirty_before_starting_transaction: None,
920 has_unsaved_edits: Cell::new((buffer.version(), false)),
921 text: buffer,
922 branch_state: None,
923 file,
924 capability,
925 syntax_map,
926 reparse: None,
927 non_text_state_update_count: 0,
928 sync_parse_timeout: Duration::from_millis(1),
929 parse_status: async_watch::channel(ParseStatus::Idle),
930 autoindent_requests: Default::default(),
931 pending_autoindent: Default::default(),
932 language: None,
933 remote_selections: Default::default(),
934 diagnostics: Default::default(),
935 diagnostics_timestamp: Default::default(),
936 completion_triggers: Default::default(),
937 completion_triggers_per_language_server: Default::default(),
938 completion_triggers_timestamp: Default::default(),
939 deferred_ops: OperationQueue::new(),
940 has_conflict: false,
941 change_bits: Default::default(),
942 _subscriptions: Vec::new(),
943 }
944 }
945
946 pub fn build_snapshot(
947 text: Rope,
948 language: Option<Arc<Language>>,
949 language_registry: Option<Arc<LanguageRegistry>>,
950 cx: &mut App,
951 ) -> impl Future<Output = BufferSnapshot> + use<> {
952 let entity_id = cx.reserve_entity::<Self>().entity_id();
953 let buffer_id = entity_id.as_non_zero_u64().into();
954 async move {
955 let text =
956 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
957 let mut syntax = SyntaxMap::new(&text).snapshot();
958 if let Some(language) = language.clone() {
959 let text = text.clone();
960 let language = language.clone();
961 let language_registry = language_registry.clone();
962 syntax.reparse(&text, language_registry, language);
963 }
964 BufferSnapshot {
965 text,
966 syntax,
967 file: None,
968 diagnostics: Default::default(),
969 remote_selections: Default::default(),
970 language,
971 non_text_state_update_count: 0,
972 }
973 }
974 }
975
976 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
977 let entity_id = cx.reserve_entity::<Self>().entity_id();
978 let buffer_id = entity_id.as_non_zero_u64().into();
979 let text =
980 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
981 let syntax = SyntaxMap::new(&text).snapshot();
982 BufferSnapshot {
983 text,
984 syntax,
985 file: None,
986 diagnostics: Default::default(),
987 remote_selections: Default::default(),
988 language: None,
989 non_text_state_update_count: 0,
990 }
991 }
992
993 #[cfg(any(test, feature = "test-support"))]
994 pub fn build_snapshot_sync(
995 text: Rope,
996 language: Option<Arc<Language>>,
997 language_registry: Option<Arc<LanguageRegistry>>,
998 cx: &mut App,
999 ) -> BufferSnapshot {
1000 let entity_id = cx.reserve_entity::<Self>().entity_id();
1001 let buffer_id = entity_id.as_non_zero_u64().into();
1002 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1003 let mut syntax = SyntaxMap::new(&text).snapshot();
1004 if let Some(language) = language.clone() {
1005 let text = text.clone();
1006 let language = language.clone();
1007 let language_registry = language_registry.clone();
1008 syntax.reparse(&text, language_registry, language);
1009 }
1010 BufferSnapshot {
1011 text,
1012 syntax,
1013 file: None,
1014 diagnostics: Default::default(),
1015 remote_selections: Default::default(),
1016 language,
1017 non_text_state_update_count: 0,
1018 }
1019 }
1020
1021 /// Retrieve a snapshot of the buffer's current state. This is computationally
1022 /// cheap, and allows reading from the buffer on a background thread.
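    ///
    /// A minimal sketch (hypothetical surrounding context):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // All reads go through the snapshot, off the main thread.
    ///     let text = snapshot.text();
    /// })
    /// .detach();
    /// ```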
1023 pub fn snapshot(&self) -> BufferSnapshot {
1024 let text = self.text.snapshot();
1025 let mut syntax_map = self.syntax_map.lock();
1026 syntax_map.interpolate(&text);
1027 let syntax = syntax_map.snapshot();
1028
1029 BufferSnapshot {
1030 text,
1031 syntax,
1032 file: self.file.clone(),
1033 remote_selections: self.remote_selections.clone(),
1034 diagnostics: self.diagnostics.clone(),
1035 language: self.language.clone(),
1036 non_text_state_update_count: self.non_text_state_update_count,
1037 }
1038 }
1039
1040 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1041 let this = cx.entity();
1042 cx.new(|cx| {
1043 let mut branch = Self {
1044 branch_state: Some(BufferBranchState {
1045 base_buffer: this.clone(),
1046 merged_operations: Default::default(),
1047 }),
1048 language: self.language.clone(),
1049 has_conflict: self.has_conflict,
1050 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1051 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1052 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1053 };
1054 if let Some(language_registry) = self.language_registry() {
1055 branch.set_language_registry(language_registry);
1056 }
1057
1058 // Reparse the branch buffer so that we get syntax highlighting immediately.
1059 branch.reparse(cx);
1060
1061 branch
1062 })
1063 }
1064
1065 pub fn preview_edits(
1066 &self,
1067 edits: Arc<[(Range<Anchor>, String)]>,
1068 cx: &App,
1069 ) -> Task<EditPreview> {
1070 let registry = self.language_registry();
1071 let language = self.language().cloned();
1072 let old_snapshot = self.text.snapshot();
1073 let mut branch_buffer = self.text.branch();
1074 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1075 cx.background_spawn(async move {
1076 if !edits.is_empty() {
1077 if let Some(language) = language.clone() {
1078 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1079 }
1080
1081 branch_buffer.edit(edits.iter().cloned());
1082 let snapshot = branch_buffer.snapshot();
1083 syntax_snapshot.interpolate(&snapshot);
1084
1085 if let Some(language) = language {
1086 syntax_snapshot.reparse(&snapshot, registry, language);
1087 }
1088 }
1089 EditPreview {
1090 old_snapshot,
1091 applied_edits_snapshot: branch_buffer.snapshot(),
1092 syntax_snapshot,
1093 }
1094 })
1095 }
1096
1097 /// Applies all of the changes in this buffer that intersect any of the
1098 /// given `ranges` to its base buffer.
1099 ///
1100 /// If `ranges` is empty, then all changes will be applied. This buffer must
1101 /// be a branch buffer to call this method.
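    ///
    /// A minimal sketch (hypothetical surrounding context):
    ///
    /// ```ignore
    /// let branch = base_buffer.update(cx, |buffer, cx| buffer.branch(cx));
    /// branch.update(cx, |buffer, cx| {
    ///     buffer.edit([(0..0, "// new first line\n")], None, cx);
    ///     // An empty `ranges` vector merges every change back into the base buffer.
    ///     buffer.merge_into_base(Vec::new(), cx);
    /// });
    /// ```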
1102 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1103 let Some(base_buffer) = self.base_buffer() else {
1104 debug_panic!("not a branch buffer");
1105 return;
1106 };
1107
1108 let mut ranges = if ranges.is_empty() {
1109 &[0..usize::MAX]
1110 } else {
1111 ranges.as_slice()
1112 }
1113 .into_iter()
1114 .peekable();
1115
1116 let mut edits = Vec::new();
1117 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1118 let mut is_included = false;
1119 while let Some(range) = ranges.peek() {
1120 if range.end < edit.new.start {
1121 ranges.next().unwrap();
1122 } else {
1123 if range.start <= edit.new.end {
1124 is_included = true;
1125 }
1126 break;
1127 }
1128 }
1129
1130 if is_included {
1131 edits.push((
1132 edit.old.clone(),
1133 self.text_for_range(edit.new.clone()).collect::<String>(),
1134 ));
1135 }
1136 }
1137
1138 let operation = base_buffer.update(cx, |base_buffer, cx| {
1139 // cx.emit(BufferEvent::DiffBaseChanged);
1140 base_buffer.edit(edits, None, cx)
1141 });
1142
1143 if let Some(operation) = operation {
1144 if let Some(BufferBranchState {
1145 merged_operations, ..
1146 }) = &mut self.branch_state
1147 {
1148 merged_operations.push(operation);
1149 }
1150 }
1151 }
1152
1153 fn on_base_buffer_event(
1154 &mut self,
1155 _: Entity<Buffer>,
1156 event: &BufferEvent,
1157 cx: &mut Context<Self>,
1158 ) {
1159 let BufferEvent::Operation { operation, .. } = event else {
1160 return;
1161 };
1162 let Some(BufferBranchState {
1163 merged_operations, ..
1164 }) = &mut self.branch_state
1165 else {
1166 return;
1167 };
1168
1169 let mut operation_to_undo = None;
1170 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1171 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1172 merged_operations.remove(ix);
1173 operation_to_undo = Some(operation.timestamp);
1174 }
1175 }
1176
1177 self.apply_ops([operation.clone()], cx);
1178
1179 if let Some(timestamp) = operation_to_undo {
1180 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1181 self.undo_operations(counts, cx);
1182 }
1183 }
1184
1185 #[cfg(test)]
1186 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1187 &self.text
1188 }
1189
1190 /// Retrieve a snapshot of the buffer's raw text, without any
1191 /// language-related state like the syntax tree or diagnostics.
1192 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1193 self.text.snapshot()
1194 }
1195
1196 /// The file associated with the buffer, if any.
1197 pub fn file(&self) -> Option<&Arc<dyn File>> {
1198 self.file.as_ref()
1199 }
1200
1201 /// The version of the buffer that was last saved or reloaded from disk.
1202 pub fn saved_version(&self) -> &clock::Global {
1203 &self.saved_version
1204 }
1205
1206 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1207 pub fn saved_mtime(&self) -> Option<MTime> {
1208 self.saved_mtime
1209 }
1210
1211 /// Assign a language to the buffer.
1212 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1213 self.non_text_state_update_count += 1;
1214 self.syntax_map.lock().clear(&self.text);
1215 self.language = language;
1216 self.was_changed();
1217 self.reparse(cx);
1218 cx.emit(BufferEvent::LanguageChanged);
1219 }
1220
1221 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1222 /// other languages if parts of the buffer are written in different languages.
1223 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1224 self.syntax_map
1225 .lock()
1226 .set_language_registry(language_registry);
1227 }
1228
1229 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1230 self.syntax_map.lock().language_registry()
1231 }
1232
1233 /// Assign the buffer a new [`Capability`].
1234 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1235 self.capability = capability;
1236 cx.emit(BufferEvent::CapabilityChanged)
1237 }
1238
1239 /// This method is called to signal that the buffer has been saved.
1240 pub fn did_save(
1241 &mut self,
1242 version: clock::Global,
1243 mtime: Option<MTime>,
1244 cx: &mut Context<Self>,
1245 ) {
1246 self.saved_version = version;
1247 self.has_unsaved_edits
1248 .set((self.saved_version().clone(), false));
1249 self.has_conflict = false;
1250 self.saved_mtime = mtime;
1251 self.was_changed();
1252 cx.emit(BufferEvent::Saved);
1253 cx.notify();
1254 }
1255
1256 /// This method is called to signal that the buffer has been discarded.
1257 pub fn discarded(&self, cx: &mut Context<Self>) {
1258 cx.emit(BufferEvent::Discarded);
1259 cx.notify();
1260 }
1261
1262 /// Reloads the contents of the buffer from disk.
1263 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1264 let (tx, rx) = futures::channel::oneshot::channel();
1265 let prev_version = self.text.version();
1266 self.reload_task = Some(cx.spawn(async move |this, cx| {
1267 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1268 let file = this.file.as_ref()?.as_local()?;
1269
1270 Some((file.disk_state().mtime(), file.load(cx)))
1271 })?
1272 else {
1273 return Ok(());
1274 };
1275
1276 let new_text = new_text.await?;
1277 let diff = this
1278 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1279 .await;
1280 this.update(cx, |this, cx| {
1281 if this.version() == diff.base_version {
1282 this.finalize_last_transaction();
1283 this.apply_diff(diff, cx);
1284 tx.send(this.finalize_last_transaction().cloned()).ok();
1285 this.has_conflict = false;
1286 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1287 } else {
1288 if !diff.edits.is_empty()
1289 || this
1290 .edits_since::<usize>(&diff.base_version)
1291 .next()
1292 .is_some()
1293 {
1294 this.has_conflict = true;
1295 }
1296
1297 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1298 }
1299
1300 this.reload_task.take();
1301 })
1302 }));
1303 rx
1304 }
1305
1306 /// This method is called to signal that the buffer has been reloaded.
1307 pub fn did_reload(
1308 &mut self,
1309 version: clock::Global,
1310 line_ending: LineEnding,
1311 mtime: Option<MTime>,
1312 cx: &mut Context<Self>,
1313 ) {
1314 self.saved_version = version;
1315 self.has_unsaved_edits
1316 .set((self.saved_version.clone(), false));
1317 self.text.set_line_ending(line_ending);
1318 self.saved_mtime = mtime;
1319 cx.emit(BufferEvent::Reloaded);
1320 cx.notify();
1321 }
1322
1323 /// Updates the [`File`] backing this buffer. This should be called when
1324 /// the file has changed or has been deleted.
1325 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1326 let was_dirty = self.is_dirty();
1327 let mut file_changed = false;
1328
1329 if let Some(old_file) = self.file.as_ref() {
1330 if new_file.path() != old_file.path() {
1331 file_changed = true;
1332 }
1333
1334 let old_state = old_file.disk_state();
1335 let new_state = new_file.disk_state();
1336 if old_state != new_state {
1337 file_changed = true;
1338 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1339 cx.emit(BufferEvent::ReloadNeeded)
1340 }
1341 }
1342 } else {
1343 file_changed = true;
1344 };
1345
1346 self.file = Some(new_file);
1347 if file_changed {
1348 self.was_changed();
1349 self.non_text_state_update_count += 1;
1350 if was_dirty != self.is_dirty() {
1351 cx.emit(BufferEvent::DirtyChanged);
1352 }
1353 cx.emit(BufferEvent::FileHandleChanged);
1354 cx.notify();
1355 }
1356 }
1357
1358 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1359 Some(self.branch_state.as_ref()?.base_buffer.clone())
1360 }
1361
1362 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1363 pub fn language(&self) -> Option<&Arc<Language>> {
1364 self.language.as_ref()
1365 }
1366
1367 /// Returns the [`Language`] at the given location.
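    ///
    /// For example, in a Markdown buffer containing a fenced Rust code block, a position
    /// inside the fence resolves to the injected Rust language (when it is available),
    /// while other positions fall back to the buffer's primary language.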
1368 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1369 let offset = position.to_offset(self);
1370 self.syntax_map
1371 .lock()
1372 .layers_for_range(offset..offset, &self.text, false)
1373 .last()
1374 .map(|info| info.language.clone())
1375 .or_else(|| self.language.clone())
1376 }
1377
1378 /// Returns each [`Language`] for the active syntax layers at the given location.
1379 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1380 let offset = position.to_offset(self);
1381 let mut languages: Vec<Arc<Language>> = self
1382 .syntax_map
1383 .lock()
1384 .layers_for_range(offset..offset, &self.text, false)
1385 .map(|info| info.language.clone())
1386 .collect();
1387
1388 if languages.is_empty() {
1389 if let Some(buffer_language) = self.language() {
1390 languages.push(buffer_language.clone());
1391 }
1392 }
1393
1394 languages
1395 }
1396
1397 /// An integer version number that accounts for all updates besides
1398 /// the buffer's text itself (which is versioned via a version vector).
1399 pub fn non_text_state_update_count(&self) -> usize {
1400 self.non_text_state_update_count
1401 }
1402
1403 /// Whether the buffer is being parsed in the background.
1404 #[cfg(any(test, feature = "test-support"))]
1405 pub fn is_parsing(&self) -> bool {
1406 self.reparse.is_some()
1407 }
1408
1409 /// Indicates whether the buffer contains any regions that may be
1410 /// written in a language that hasn't been loaded yet.
1411 pub fn contains_unknown_injections(&self) -> bool {
1412 self.syntax_map.lock().contains_unknown_injections()
1413 }
1414
1415 #[cfg(test)]
1416 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1417 self.sync_parse_timeout = timeout;
1418 }
1419
1420 /// Called after an edit to synchronize the buffer's main parse tree with
1421 /// the buffer's new underlying state.
1422 ///
1423 /// Locks the syntax map and interpolates the edits since the last reparse
1424 /// into the foreground syntax tree.
1425 ///
1426 /// Then takes a stable snapshot of the syntax map before unlocking it.
1427 /// The snapshot with the interpolated edits is sent to a background thread,
1428 /// where we ask Tree-sitter to perform an incremental parse.
1429 ///
    /// Meanwhile, in the foreground, we block the main thread for up to 1ms
    /// waiting on the parse to complete. As soon as it completes, we proceed
    /// synchronously, unless the timeout elapses first.
    ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// until the parse does complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, we call back into
    /// the main thread and assign the foreground parse state.
1438 ///
1439 /// If the buffer or grammar changed since the start of the background parse,
1440 /// initiate an additional reparse recursively. To avoid concurrent parses
1441 /// for the same buffer, we only initiate a new parse if we are not already
1442 /// parsing in the background.
1443 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1444 if self.reparse.is_some() {
1445 return;
1446 }
1447 let language = if let Some(language) = self.language.clone() {
1448 language
1449 } else {
1450 return;
1451 };
1452
1453 let text = self.text_snapshot();
1454 let parsed_version = self.version();
1455
1456 let mut syntax_map = self.syntax_map.lock();
1457 syntax_map.interpolate(&text);
1458 let language_registry = syntax_map.language_registry();
1459 let mut syntax_snapshot = syntax_map.snapshot();
1460 drop(syntax_map);
1461
1462 let parse_task = cx.background_spawn({
1463 let language = language.clone();
1464 let language_registry = language_registry.clone();
1465 async move {
1466 syntax_snapshot.reparse(&text, language_registry, language);
1467 syntax_snapshot
1468 }
1469 });
1470
1471 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1472 match cx
1473 .background_executor()
1474 .block_with_timeout(self.sync_parse_timeout, parse_task)
1475 {
1476 Ok(new_syntax_snapshot) => {
1477 self.did_finish_parsing(new_syntax_snapshot, cx);
1478 self.reparse = None;
1479 }
1480 Err(parse_task) => {
1481 self.reparse = Some(cx.spawn(async move |this, cx| {
1482 let new_syntax_map = parse_task.await;
1483 this.update(cx, move |this, cx| {
1484 let grammar_changed =
1485 this.language.as_ref().map_or(true, |current_language| {
1486 !Arc::ptr_eq(&language, current_language)
1487 });
1488 let language_registry_changed = new_syntax_map
1489 .contains_unknown_injections()
1490 && language_registry.map_or(false, |registry| {
1491 registry.version() != new_syntax_map.language_registry_version()
1492 });
1493 let parse_again = language_registry_changed
1494 || grammar_changed
1495 || this.version.changed_since(&parsed_version);
1496 this.did_finish_parsing(new_syntax_map, cx);
1497 this.reparse = None;
1498 if parse_again {
1499 this.reparse(cx);
1500 }
1501 })
1502 .ok();
1503 }));
1504 }
1505 }
1506 }
1507
1508 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1509 self.was_changed();
1510 self.non_text_state_update_count += 1;
1511 self.syntax_map.lock().did_parse(syntax_snapshot);
1512 self.request_autoindent(cx);
1513 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1514 cx.emit(BufferEvent::Reparsed);
1515 cx.notify();
1516 }
1517
1518 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1519 self.parse_status.1.clone()
1520 }
1521
1522 /// Assign to the buffer a set of diagnostics created by a given language server.
1523 pub fn update_diagnostics(
1524 &mut self,
1525 server_id: LanguageServerId,
1526 diagnostics: DiagnosticSet,
1527 cx: &mut Context<Self>,
1528 ) {
1529 let lamport_timestamp = self.text.lamport_clock.tick();
1530 let op = Operation::UpdateDiagnostics {
1531 server_id,
1532 diagnostics: diagnostics.iter().cloned().collect(),
1533 lamport_timestamp,
1534 };
1535 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1536 self.send_operation(op, true, cx);
1537 }
1538
1539 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1540 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1541 return None;
1542 };
1543 Some(&self.diagnostics[idx].1)
1544 }
1545
1546 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1547 if let Some(indent_sizes) = self.compute_autoindents() {
1548 let indent_sizes = cx.background_spawn(indent_sizes);
1549 match cx
1550 .background_executor()
1551 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1552 {
1553 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1554 Err(indent_sizes) => {
1555 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1556 let indent_sizes = indent_sizes.await;
1557 this.update(cx, |this, cx| {
1558 this.apply_autoindents(indent_sizes, cx);
1559 })
1560 .ok();
1561 }));
1562 }
1563 }
1564 } else {
1565 self.autoindent_requests.clear();
1566 }
1567 }
1568
1569 fn compute_autoindents(
1570 &self,
1571 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1572 let max_rows_between_yields = 100;
1573 let snapshot = self.snapshot();
1574 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1575 return None;
1576 }
1577
1578 let autoindent_requests = self.autoindent_requests.clone();
1579 Some(async move {
1580 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1581 for request in autoindent_requests {
1582 // Resolve each edited range to its row in the current buffer and in the
1583 // buffer before this batch of edits.
1584 let mut row_ranges = Vec::new();
1585 let mut old_to_new_rows = BTreeMap::new();
1586 let mut language_indent_sizes_by_new_row = Vec::new();
1587 for entry in &request.entries {
1588 let position = entry.range.start;
1589 let new_row = position.to_point(&snapshot).row;
1590 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1591 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1592
1593 if !entry.first_line_is_new {
1594 let old_row = position.to_point(&request.before_edit).row;
1595 old_to_new_rows.insert(old_row, new_row);
1596 }
1597 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1598 }
1599
1600 // Build a map containing the suggested indentation for each of the edited lines
1601 // with respect to the state of the buffer before these edits. This map is keyed
1602 // by the rows for these lines in the current state of the buffer.
1603 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1604 let old_edited_ranges =
1605 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1606 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1607 let mut language_indent_size = IndentSize::default();
1608 for old_edited_range in old_edited_ranges {
1609 let suggestions = request
1610 .before_edit
1611 .suggest_autoindents(old_edited_range.clone())
1612 .into_iter()
1613 .flatten();
1614 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1615 if let Some(suggestion) = suggestion {
1616 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1617
1618 // Find the indent size based on the language for this row.
1619 while let Some((row, size)) = language_indent_sizes.peek() {
1620 if *row > new_row {
1621 break;
1622 }
1623 language_indent_size = *size;
1624 language_indent_sizes.next();
1625 }
1626
1627 let suggested_indent = old_to_new_rows
1628 .get(&suggestion.basis_row)
1629 .and_then(|from_row| {
1630 Some(old_suggestions.get(from_row).copied()?.0)
1631 })
1632 .unwrap_or_else(|| {
1633 request
1634 .before_edit
1635 .indent_size_for_line(suggestion.basis_row)
1636 })
1637 .with_delta(suggestion.delta, language_indent_size);
1638 old_suggestions
1639 .insert(new_row, (suggested_indent, suggestion.within_error));
1640 }
1641 }
1642 yield_now().await;
1643 }
1644
1645 // Compute new suggestions for each line, but only include them in the result
1646 // if they differ from the old suggestion for that line.
1647 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1648 let mut language_indent_size = IndentSize::default();
1649 for (row_range, original_indent_column) in row_ranges {
1650 let new_edited_row_range = if request.is_block_mode {
1651 row_range.start..row_range.start + 1
1652 } else {
1653 row_range.clone()
1654 };
1655
1656 let suggestions = snapshot
1657 .suggest_autoindents(new_edited_row_range.clone())
1658 .into_iter()
1659 .flatten();
1660 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1661 if let Some(suggestion) = suggestion {
1662 // Find the indent size based on the language for this row.
1663 while let Some((row, size)) = language_indent_sizes.peek() {
1664 if *row > new_row {
1665 break;
1666 }
1667 language_indent_size = *size;
1668 language_indent_sizes.next();
1669 }
1670
1671 let suggested_indent = indent_sizes
1672 .get(&suggestion.basis_row)
1673 .copied()
1674 .map(|e| e.0)
1675 .unwrap_or_else(|| {
1676 snapshot.indent_size_for_line(suggestion.basis_row)
1677 })
1678 .with_delta(suggestion.delta, language_indent_size);
1679
1680 if old_suggestions.get(&new_row).map_or(
1681 true,
1682 |(old_indentation, was_within_error)| {
1683 suggested_indent != *old_indentation
1684 && (!suggestion.within_error || *was_within_error)
1685 },
1686 ) {
1687 indent_sizes.insert(
1688 new_row,
1689 (suggested_indent, request.ignore_empty_lines),
1690 );
1691 }
1692 }
1693 }
1694
1695 if let (true, Some(original_indent_column)) =
1696 (request.is_block_mode, original_indent_column)
1697 {
1698 let new_indent =
1699 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1700 *indent
1701 } else {
1702 snapshot.indent_size_for_line(row_range.start)
1703 };
1704 let delta = new_indent.len as i64 - original_indent_column as i64;
1705 if delta != 0 {
1706 for row in row_range.skip(1) {
1707 indent_sizes.entry(row).or_insert_with(|| {
1708 let mut size = snapshot.indent_size_for_line(row);
1709 if size.kind == new_indent.kind {
1710 match delta.cmp(&0) {
1711 Ordering::Greater => size.len += delta as u32,
1712 Ordering::Less => {
1713 size.len = size.len.saturating_sub(-delta as u32)
1714 }
1715 Ordering::Equal => {}
1716 }
1717 }
1718 (size, request.ignore_empty_lines)
1719 });
1720 }
1721 }
1722 }
1723
1724 yield_now().await;
1725 }
1726 }
1727
1728 indent_sizes
1729 .into_iter()
1730 .filter_map(|(row, (indent, ignore_empty_lines))| {
1731 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1732 None
1733 } else {
1734 Some((row, indent))
1735 }
1736 })
1737 .collect()
1738 })
1739 }
1740
1741 fn apply_autoindents(
1742 &mut self,
1743 indent_sizes: BTreeMap<u32, IndentSize>,
1744 cx: &mut Context<Self>,
1745 ) {
1746 self.autoindent_requests.clear();
1747
1748 let edits: Vec<_> = indent_sizes
1749 .into_iter()
1750 .filter_map(|(row, indent_size)| {
1751 let current_size = indent_size_for_line(self, row);
1752 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1753 })
1754 .collect();
1755
1756 let preserve_preview = self.preserve_preview();
1757 self.edit(edits, None, cx);
1758 if preserve_preview {
1759 self.refresh_preview();
1760 }
1761 }
1762
1763 /// Create a minimal edit that will cause the given row to be indented
1764 /// with the given size. After applying this edit, the length of the line
1765 /// will always be at least `new_size.len`.
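    ///
    /// A minimal sketch of the expected output (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces on row 3 yields an insertion of
    /// // two spaces at the start of that row.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```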
1766 pub fn edit_for_indent_size_adjustment(
1767 row: u32,
1768 current_size: IndentSize,
1769 new_size: IndentSize,
1770 ) -> Option<(Range<Point>, String)> {
1771 if new_size.kind == current_size.kind {
1772            match new_size.len.cmp(&current_size.len) {
1773 Ordering::Greater => {
1774 let point = Point::new(row, 0);
1775 Some((
1776 point..point,
1777 iter::repeat(new_size.char())
1778 .take((new_size.len - current_size.len) as usize)
1779 .collect::<String>(),
1780 ))
1781 }
1782
1783 Ordering::Less => Some((
1784 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1785 String::new(),
1786 )),
1787
1788 Ordering::Equal => None,
1789 }
1790 } else {
1791 Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len),
1793 iter::repeat(new_size.char())
1794 .take(new_size.len as usize)
1795 .collect::<String>(),
1796 ))
1797 }
1798 }
1799
1800 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1801 /// and the given new text.
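    ///
    /// A minimal sketch of the intended workflow, assuming an async context in which
    /// the task can be awaited and a `Context<Buffer>` is available when it resolves
    /// (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Kick off the diff against the buffer's current contents.
    /// let task = buffer.diff(new_contents, cx);
    /// // Later, once the task resolves (e.g. in a spawned future that can update
    /// // the buffer entity), apply it; hunks invalidated by concurrent edits are
    /// // adjusted or dropped by `apply_diff`:
    /// // let transaction = buffer.apply_diff(diff, cx);
    /// ```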
1802 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1803 let old_text = self.as_rope().clone();
1804 let base_version = self.version();
1805 cx.background_executor()
1806 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1807 let old_text = old_text.to_string();
1808 let line_ending = LineEnding::detect(&new_text);
1809 LineEnding::normalize(&mut new_text);
1810 let edits = text_diff(&old_text, &new_text);
1811 Diff {
1812 base_version,
1813 line_ending,
1814 edits,
1815 }
1816 })
1817 }
1818
1819 /// Spawns a background task that searches the buffer for any whitespace
1820    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1821 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1822 let old_text = self.as_rope().clone();
1823 let line_ending = self.line_ending();
1824 let base_version = self.version();
1825 cx.background_spawn(async move {
1826 let ranges = trailing_whitespace_ranges(&old_text);
1827 let empty = Arc::<str>::from("");
1828 Diff {
1829 base_version,
1830 line_ending,
1831 edits: ranges
1832 .into_iter()
1833 .map(|range| (range, empty.clone()))
1834 .collect(),
1835 }
1836 })
1837 }
1838
1839 /// Ensures that the buffer ends with a single newline character, and
1840 /// no other whitespace.
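    ///
    /// A minimal sketch of the effect, assuming a `cx: &mut Context<Buffer>`
    /// (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Given a buffer containing "fn main() {}\n\n  \n", the trailing blank
    /// // lines and spaces are replaced so the text ends with a single newline:
    /// buffer.ensure_final_newline(cx);
    /// // => "fn main() {}\n"
    /// ```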
1841 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1842 let len = self.len();
1843 let mut offset = len;
1844 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1845 let non_whitespace_len = chunk
1846 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1847 .len();
1848 offset -= chunk.len();
1849 offset += non_whitespace_len;
1850 if non_whitespace_len != 0 {
1851 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1852 return;
1853 }
1854 break;
1855 }
1856 }
1857 self.edit([(offset..len, "\n")], None, cx);
1858 }
1859
1860 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1861 /// calculated, then adjust the diff to account for those changes, and discard any
1862 /// parts of the diff that conflict with those changes.
1863 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1864 let snapshot = self.snapshot();
1865 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1866 let mut delta = 0;
1867 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1868 while let Some(edit_since) = edits_since.peek() {
1869 // If the edit occurs after a diff hunk, then it does not
1870 // affect that hunk.
1871 if edit_since.old.start > range.end {
1872 break;
1873 }
1874 // If the edit precedes the diff hunk, then adjust the hunk
1875 // to reflect the edit.
1876 else if edit_since.old.end < range.start {
1877 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1878 edits_since.next();
1879 }
1880 // If the edit intersects a diff hunk, then discard that hunk.
1881 else {
1882 return None;
1883 }
1884 }
1885
1886 let start = (range.start as i64 + delta) as usize;
1887 let end = (range.end as i64 + delta) as usize;
1888 Some((start..end, new_text))
1889 });
1890
1891 self.start_transaction();
1892 self.text.set_line_ending(diff.line_ending);
1893 self.edit(adjusted_edits, None, cx);
1894 self.end_transaction(cx)
1895 }
1896
1897 fn has_unsaved_edits(&self) -> bool {
1898 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1899
1900 if last_version == self.version {
1901 self.has_unsaved_edits
1902 .set((last_version, has_unsaved_edits));
1903 return has_unsaved_edits;
1904 }
1905
1906 let has_edits = self.has_edits_since(&self.saved_version);
1907 self.has_unsaved_edits
1908 .set((self.version.clone(), has_edits));
1909 has_edits
1910 }
1911
1912 /// Checks if the buffer has unsaved changes.
1913 pub fn is_dirty(&self) -> bool {
1914 if self.capability == Capability::ReadOnly {
1915 return false;
1916 }
1917 if self.has_conflict {
1918 return true;
1919 }
1920 match self.file.as_ref().map(|f| f.disk_state()) {
1921 Some(DiskState::New) | Some(DiskState::Deleted) => {
1922 !self.is_empty() && self.has_unsaved_edits()
1923 }
1924 _ => self.has_unsaved_edits(),
1925 }
1926 }
1927
1928 /// Checks if the buffer and its file have both changed since the buffer
1929 /// was last saved or reloaded.
1930 pub fn has_conflict(&self) -> bool {
1931 if self.has_conflict {
1932 return true;
1933 }
1934 let Some(file) = self.file.as_ref() else {
1935 return false;
1936 };
1937 match file.disk_state() {
1938 DiskState::New => false,
1939 DiskState::Present { mtime } => match self.saved_mtime {
1940 Some(saved_mtime) => {
1941 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1942 }
1943 None => true,
1944 },
1945 DiskState::Deleted => false,
1946 }
1947 }
1948
1949 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1950 pub fn subscribe(&mut self) -> Subscription {
1951 self.text.subscribe()
1952 }
1953
1954 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1955 ///
1956 /// This allows downstream code to check if the buffer's text has changed without
1957    /// waiting for an effect cycle, which would be required if using events.
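    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&bit));
    /// // The bit is flipped to `true` the next time the buffer changes:
    /// // assert!(bit.get());
    /// ```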
1958 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1959 if let Err(ix) = self
1960 .change_bits
1961 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1962 {
1963 self.change_bits.insert(ix, bit);
1964 }
1965 }
1966
1967 fn was_changed(&mut self) {
1968 self.change_bits.retain(|change_bit| {
1969 change_bit.upgrade().map_or(false, |bit| {
1970 bit.replace(true);
1971 true
1972 })
1973 });
1974 }
1975
1976 /// Starts a transaction, if one is not already in-progress. When undoing or
1977 /// redoing edits, all of the edits performed within a transaction are undone
1978 /// or redone together.
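    ///
    /// A minimal sketch, assuming a `cx: &mut Context<Buffer>` (not compiled as
    /// a doctest):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// let end = buffer.len();
    /// buffer.edit([(end..end, "\n// footer\n")], None, cx);
    /// // Both edits are undone together by a single call to `undo`.
    /// let transaction_id = buffer.end_transaction(cx);
    /// ```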
1979 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1980 self.start_transaction_at(Instant::now())
1981 }
1982
1983 /// Starts a transaction, providing the current time. Subsequent transactions
1984 /// that occur within a short period of time will be grouped together. This
1985 /// is controlled by the buffer's undo grouping duration.
1986 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1987 self.transaction_depth += 1;
1988 if self.was_dirty_before_starting_transaction.is_none() {
1989 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1990 }
1991 self.text.start_transaction_at(now)
1992 }
1993
1994 /// Terminates the current transaction, if this is the outermost transaction.
1995 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
1996 self.end_transaction_at(Instant::now(), cx)
1997 }
1998
1999 /// Terminates the current transaction, providing the current time. Subsequent transactions
2000 /// that occur within a short period of time will be grouped together. This
2001 /// is controlled by the buffer's undo grouping duration.
2002 pub fn end_transaction_at(
2003 &mut self,
2004 now: Instant,
2005 cx: &mut Context<Self>,
2006 ) -> Option<TransactionId> {
2007 assert!(self.transaction_depth > 0);
2008 self.transaction_depth -= 1;
2009 let was_dirty = if self.transaction_depth == 0 {
2010 self.was_dirty_before_starting_transaction.take().unwrap()
2011 } else {
2012 false
2013 };
2014 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2015 self.did_edit(&start_version, was_dirty, cx);
2016 Some(transaction_id)
2017 } else {
2018 None
2019 }
2020 }
2021
2022 /// Manually add a transaction to the buffer's undo history.
2023 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2024 self.text.push_transaction(transaction, now);
2025 }
2026
2027 /// Prevent the last transaction from being grouped with any subsequent transactions,
2028    /// even if they occur within the buffer's undo grouping duration.
2029 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2030 self.text.finalize_last_transaction()
2031 }
2032
2033 /// Manually group all changes since a given transaction.
2034 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2035 self.text.group_until_transaction(transaction_id);
2036 }
2037
2038    /// Manually remove a transaction from the buffer's undo history.
2039 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2040 self.text.forget_transaction(transaction_id)
2041 }
2042
2043    /// Retrieve a transaction from the buffer's undo history.
2044 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2045 self.text.get_transaction(transaction_id)
2046 }
2047
2048 /// Manually merge two transactions in the buffer's undo history.
2049 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2050 self.text.merge_transactions(transaction, destination);
2051 }
2052
2053 /// Waits for the buffer to receive operations with the given timestamps.
2054 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2055 &mut self,
2056 edit_ids: It,
2057 ) -> impl Future<Output = Result<()>> + use<It> {
2058 self.text.wait_for_edits(edit_ids)
2059 }
2060
2061 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2062 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2063 &mut self,
2064 anchors: It,
2065 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_anchors(anchors)
2067 }
2068
2069 /// Waits for the buffer to receive operations up to the given version.
2070 pub fn wait_for_version(
2071 &mut self,
2072 version: clock::Global,
2073 ) -> impl Future<Output = Result<()>> + use<> {
2074 self.text.wait_for_version(version)
2075 }
2076
2077    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2078    /// [`Buffer::wait_for_version`] to resolve with an error.
2079 pub fn give_up_waiting(&mut self) {
2080 self.text.give_up_waiting();
2081 }
2082
2083 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2084 pub fn set_active_selections(
2085 &mut self,
2086 selections: Arc<[Selection<Anchor>]>,
2087 line_mode: bool,
2088 cursor_shape: CursorShape,
2089 cx: &mut Context<Self>,
2090 ) {
2091 let lamport_timestamp = self.text.lamport_clock.tick();
2092 self.remote_selections.insert(
2093 self.text.replica_id(),
2094 SelectionSet {
2095 selections: selections.clone(),
2096 lamport_timestamp,
2097 line_mode,
2098 cursor_shape,
2099 },
2100 );
2101 self.send_operation(
2102 Operation::UpdateSelections {
2103 selections,
2104 line_mode,
2105 lamport_timestamp,
2106 cursor_shape,
2107 },
2108 true,
2109 cx,
2110 );
2111 self.non_text_state_update_count += 1;
2112 cx.notify();
2113 }
2114
2115 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2116 /// this replica.
2117 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2118 if self
2119 .remote_selections
2120 .get(&self.text.replica_id())
2121 .map_or(true, |set| !set.selections.is_empty())
2122 {
2123 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2124 }
2125 }
2126
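    /// Stores a set of selections under the agent's reserved replica id, so they can
    /// be rendered alongside user selections. Unlike [`Buffer::set_active_selections`],
    /// these selections are not broadcast to other replicas of the buffer.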
2127 pub fn set_agent_selections(
2128 &mut self,
2129 selections: Arc<[Selection<Anchor>]>,
2130 line_mode: bool,
2131 cursor_shape: CursorShape,
2132 cx: &mut Context<Self>,
2133 ) {
2134 let lamport_timestamp = self.text.lamport_clock.tick();
2135 self.remote_selections.insert(
2136 AGENT_REPLICA_ID,
2137 SelectionSet {
2138 selections: selections.clone(),
2139 lamport_timestamp,
2140 line_mode,
2141 cursor_shape,
2142 },
2143 );
2144 self.non_text_state_update_count += 1;
2145 cx.notify();
2146 }
2147
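    /// Clears the agent's selections for this buffer.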
2148 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2149 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2150 }
2151
2152 /// Replaces the buffer's entire text.
2153 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2154 where
2155 T: Into<Arc<str>>,
2156 {
2157 self.autoindent_requests.clear();
2158 self.edit([(0..self.len(), text)], None, cx)
2159 }
2160
2161 /// Appends the given text to the end of the buffer.
2162 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2163 where
2164 T: Into<Arc<str>>,
2165 {
2166 self.edit([(self.len()..self.len(), text)], None, cx)
2167 }
2168
2169 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2170 /// delete, and a string of text to insert at that location.
2171 ///
2172 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2173 /// request for the edited ranges, which will be processed when the buffer finishes
2174 /// parsing.
2175 ///
2176    /// Parsing takes place at the end of a transaction, and may be performed synchronously
2177    /// or asynchronously, depending on the changes.
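    ///
    /// A minimal sketch, assuming a `cx: &mut Context<Buffer>` (not compiled as
    /// a doctest):
    ///
    /// ```ignore
    /// // Replace the first five bytes (assuming the buffer has at least that many)
    /// // and append a comment in a single call. Touching or overlapping ranges are
    /// // coalesced before being applied, and edits that neither delete nor insert
    /// // anything are skipped.
    /// let end = buffer.len();
    /// buffer.edit(
    ///     [(0..5, "hello"), (end..end, " // done")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```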
2178 pub fn edit<I, S, T>(
2179 &mut self,
2180 edits_iter: I,
2181 autoindent_mode: Option<AutoindentMode>,
2182 cx: &mut Context<Self>,
2183 ) -> Option<clock::Lamport>
2184 where
2185 I: IntoIterator<Item = (Range<S>, T)>,
2186 S: ToOffset,
2187 T: Into<Arc<str>>,
2188 {
2189 // Skip invalid edits and coalesce contiguous ones.
2190 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2191
2192 for (range, new_text) in edits_iter {
2193 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2194
2195 if range.start > range.end {
2196 mem::swap(&mut range.start, &mut range.end);
2197 }
2198 let new_text = new_text.into();
2199 if !new_text.is_empty() || !range.is_empty() {
2200 if let Some((prev_range, prev_text)) = edits.last_mut() {
2201 if prev_range.end >= range.start {
2202 prev_range.end = cmp::max(prev_range.end, range.end);
2203 *prev_text = format!("{prev_text}{new_text}").into();
2204 } else {
2205 edits.push((range, new_text));
2206 }
2207 } else {
2208 edits.push((range, new_text));
2209 }
2210 }
2211 }
2212 if edits.is_empty() {
2213 return None;
2214 }
2215
2216 self.start_transaction();
2217 self.pending_autoindent.take();
2218 let autoindent_request = autoindent_mode
2219 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2220
2221 let edit_operation = self.text.edit(edits.iter().cloned());
2222 let edit_id = edit_operation.timestamp();
2223
2224 if let Some((before_edit, mode)) = autoindent_request {
2225 let mut delta = 0isize;
2226 let entries = edits
2227 .into_iter()
2228 .enumerate()
2229 .zip(&edit_operation.as_edit().unwrap().new_text)
2230 .map(|((ix, (range, _)), new_text)| {
2231 let new_text_length = new_text.len();
2232 let old_start = range.start.to_point(&before_edit);
2233 let new_start = (delta + range.start as isize) as usize;
2234 let range_len = range.end - range.start;
2235 delta += new_text_length as isize - range_len as isize;
2236
2237 // Decide what range of the insertion to auto-indent, and whether
2238 // the first line of the insertion should be considered a newly-inserted line
2239 // or an edit to an existing line.
2240 let mut range_of_insertion_to_indent = 0..new_text_length;
2241 let mut first_line_is_new = true;
2242
2243 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2244 let old_line_end = before_edit.line_len(old_start.row);
2245
2246 if old_start.column > old_line_start {
2247 first_line_is_new = false;
2248 }
2249
2250 if !new_text.contains('\n')
2251 && (old_start.column + (range_len as u32) < old_line_end
2252 || old_line_end == old_line_start)
2253 {
2254 first_line_is_new = false;
2255 }
2256
2257 // When inserting text starting with a newline, avoid auto-indenting the
2258 // previous line.
2259 if new_text.starts_with('\n') {
2260 range_of_insertion_to_indent.start += 1;
2261 first_line_is_new = true;
2262 }
2263
2264 let mut original_indent_column = None;
2265 if let AutoindentMode::Block {
2266 original_indent_columns,
2267 } = &mode
2268 {
2269 original_indent_column = Some(if new_text.starts_with('\n') {
2270 indent_size_for_text(
2271 new_text[range_of_insertion_to_indent.clone()].chars(),
2272 )
2273 .len
2274 } else {
2275 original_indent_columns
2276 .get(ix)
2277 .copied()
2278 .flatten()
2279 .unwrap_or_else(|| {
2280 indent_size_for_text(
2281 new_text[range_of_insertion_to_indent.clone()].chars(),
2282 )
2283 .len
2284 })
2285 });
2286
2287 // Avoid auto-indenting the line after the edit.
2288 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2289 range_of_insertion_to_indent.end -= 1;
2290 }
2291 }
2292
2293 AutoindentRequestEntry {
2294 first_line_is_new,
2295 original_indent_column,
2296 indent_size: before_edit.language_indent_size_at(range.start, cx),
2297 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2298 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2299 }
2300 })
2301 .collect();
2302
2303 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2304 before_edit,
2305 entries,
2306 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2307 ignore_empty_lines: false,
2308 }));
2309 }
2310
2311 self.end_transaction(cx);
2312 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2313 Some(edit_id)
2314 }
2315
2316 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2317 self.was_changed();
2318
2319 if self.edits_since::<usize>(old_version).next().is_none() {
2320 return;
2321 }
2322
2323 self.reparse(cx);
2324 cx.emit(BufferEvent::Edited);
2325 if was_dirty != self.is_dirty() {
2326 cx.emit(BufferEvent::DirtyChanged);
2327 }
2328 cx.notify();
2329 }
2330
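    /// Requests an auto-indent for the given ranges, treating every line in them as
    /// newly inserted. Empty lines are left unchanged when the computed indents are
    /// applied.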
2331 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2332 where
2333 I: IntoIterator<Item = Range<T>>,
2334 T: ToOffset + Copy,
2335 {
2336 let before_edit = self.snapshot();
2337 let entries = ranges
2338 .into_iter()
2339 .map(|range| AutoindentRequestEntry {
2340 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2341 first_line_is_new: true,
2342 indent_size: before_edit.language_indent_size_at(range.start, cx),
2343 original_indent_column: None,
2344 })
2345 .collect();
2346 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2347 before_edit,
2348 entries,
2349 is_block_mode: false,
2350 ignore_empty_lines: true,
2351 }));
2352 self.request_autoindent(cx);
2353 }
2354
2355    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2356    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
2357 pub fn insert_empty_line(
2358 &mut self,
2359 position: impl ToPoint,
2360 space_above: bool,
2361 space_below: bool,
2362 cx: &mut Context<Self>,
2363 ) -> Point {
2364 let mut position = position.to_point(self);
2365
2366 self.start_transaction();
2367
2368 self.edit(
2369 [(position..position, "\n")],
2370 Some(AutoindentMode::EachLine),
2371 cx,
2372 );
2373
2374 if position.column > 0 {
2375 position += Point::new(1, 0);
2376 }
2377
2378 if !self.is_line_blank(position.row) {
2379 self.edit(
2380 [(position..position, "\n")],
2381 Some(AutoindentMode::EachLine),
2382 cx,
2383 );
2384 }
2385
2386 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2387 self.edit(
2388 [(position..position, "\n")],
2389 Some(AutoindentMode::EachLine),
2390 cx,
2391 );
2392 position.row += 1;
2393 }
2394
2395 if space_below
2396 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2397 {
2398 self.edit(
2399 [(position..position, "\n")],
2400 Some(AutoindentMode::EachLine),
2401 cx,
2402 );
2403 }
2404
2405 self.end_transaction(cx);
2406
2407 position
2408 }
2409
2410 /// Applies the given remote operations to the buffer.
2411 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2412 self.pending_autoindent.take();
2413 let was_dirty = self.is_dirty();
2414 let old_version = self.version.clone();
2415 let mut deferred_ops = Vec::new();
2416 let buffer_ops = ops
2417 .into_iter()
2418 .filter_map(|op| match op {
2419 Operation::Buffer(op) => Some(op),
2420 _ => {
2421 if self.can_apply_op(&op) {
2422 self.apply_op(op, cx);
2423 } else {
2424 deferred_ops.push(op);
2425 }
2426 None
2427 }
2428 })
2429 .collect::<Vec<_>>();
2430 for operation in buffer_ops.iter() {
2431 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2432 }
2433 self.text.apply_ops(buffer_ops);
2434 self.deferred_ops.insert(deferred_ops);
2435 self.flush_deferred_ops(cx);
2436 self.did_edit(&old_version, was_dirty, cx);
2437 // Notify independently of whether the buffer was edited as the operations could include a
2438 // selection update.
2439 cx.notify();
2440 }
2441
2442 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2443 let mut deferred_ops = Vec::new();
2444 for op in self.deferred_ops.drain().iter().cloned() {
2445 if self.can_apply_op(&op) {
2446 self.apply_op(op, cx);
2447 } else {
2448 deferred_ops.push(op);
2449 }
2450 }
2451 self.deferred_ops.insert(deferred_ops);
2452 }
2453
2454 pub fn has_deferred_ops(&self) -> bool {
2455 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2456 }
2457
2458 fn can_apply_op(&self, operation: &Operation) -> bool {
2459 match operation {
2460 Operation::Buffer(_) => {
2461 unreachable!("buffer operations should never be applied at this layer")
2462 }
2463 Operation::UpdateDiagnostics {
2464 diagnostics: diagnostic_set,
2465 ..
2466 } => diagnostic_set.iter().all(|diagnostic| {
2467 self.text.can_resolve(&diagnostic.range.start)
2468 && self.text.can_resolve(&diagnostic.range.end)
2469 }),
2470 Operation::UpdateSelections { selections, .. } => selections
2471 .iter()
2472 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2473 Operation::UpdateCompletionTriggers { .. } => true,
2474 }
2475 }
2476
2477 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2478 match operation {
2479 Operation::Buffer(_) => {
2480 unreachable!("buffer operations should never be applied at this layer")
2481 }
2482 Operation::UpdateDiagnostics {
2483 server_id,
2484 diagnostics: diagnostic_set,
2485 lamport_timestamp,
2486 } => {
2487 let snapshot = self.snapshot();
2488 self.apply_diagnostic_update(
2489 server_id,
2490 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2491 lamport_timestamp,
2492 cx,
2493 );
2494 }
2495 Operation::UpdateSelections {
2496 selections,
2497 lamport_timestamp,
2498 line_mode,
2499 cursor_shape,
2500 } => {
2501 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2502 if set.lamport_timestamp > lamport_timestamp {
2503 return;
2504 }
2505 }
2506
2507 self.remote_selections.insert(
2508 lamport_timestamp.replica_id,
2509 SelectionSet {
2510 selections,
2511 lamport_timestamp,
2512 line_mode,
2513 cursor_shape,
2514 },
2515 );
2516 self.text.lamport_clock.observe(lamport_timestamp);
2517 self.non_text_state_update_count += 1;
2518 }
2519 Operation::UpdateCompletionTriggers {
2520 triggers,
2521 lamport_timestamp,
2522 server_id,
2523 } => {
2524 if triggers.is_empty() {
2525 self.completion_triggers_per_language_server
2526 .remove(&server_id);
2527 self.completion_triggers = self
2528 .completion_triggers_per_language_server
2529 .values()
2530 .flat_map(|triggers| triggers.into_iter().cloned())
2531 .collect();
2532 } else {
2533 self.completion_triggers_per_language_server
2534 .insert(server_id, triggers.iter().cloned().collect());
2535 self.completion_triggers.extend(triggers);
2536 }
2537 self.text.lamport_clock.observe(lamport_timestamp);
2538 }
2539 }
2540 }
2541
2542 fn apply_diagnostic_update(
2543 &mut self,
2544 server_id: LanguageServerId,
2545 diagnostics: DiagnosticSet,
2546 lamport_timestamp: clock::Lamport,
2547 cx: &mut Context<Self>,
2548 ) {
2549 if lamport_timestamp > self.diagnostics_timestamp {
2550 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2551 if diagnostics.is_empty() {
2552 if let Ok(ix) = ix {
2553 self.diagnostics.remove(ix);
2554 }
2555 } else {
2556 match ix {
2557 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2558 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2559 };
2560 }
2561 self.diagnostics_timestamp = lamport_timestamp;
2562 self.non_text_state_update_count += 1;
2563 self.text.lamport_clock.observe(lamport_timestamp);
2564 cx.notify();
2565 cx.emit(BufferEvent::DiagnosticsUpdated);
2566 }
2567 }
2568
2569 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2570 self.was_changed();
2571 cx.emit(BufferEvent::Operation {
2572 operation,
2573 is_local,
2574 });
2575 }
2576
2577 /// Removes the selections for a given peer.
2578 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2579 self.remote_selections.remove(&replica_id);
2580 cx.notify();
2581 }
2582
2583 /// Undoes the most recent transaction.
2584 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2585 let was_dirty = self.is_dirty();
2586 let old_version = self.version.clone();
2587
2588 if let Some((transaction_id, operation)) = self.text.undo() {
2589 self.send_operation(Operation::Buffer(operation), true, cx);
2590 self.did_edit(&old_version, was_dirty, cx);
2591 Some(transaction_id)
2592 } else {
2593 None
2594 }
2595 }
2596
2597 /// Manually undoes a specific transaction in the buffer's undo history.
2598 pub fn undo_transaction(
2599 &mut self,
2600 transaction_id: TransactionId,
2601 cx: &mut Context<Self>,
2602 ) -> bool {
2603 let was_dirty = self.is_dirty();
2604 let old_version = self.version.clone();
2605 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2606 self.send_operation(Operation::Buffer(operation), true, cx);
2607 self.did_edit(&old_version, was_dirty, cx);
2608 true
2609 } else {
2610 false
2611 }
2612 }
2613
2614 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2615 pub fn undo_to_transaction(
2616 &mut self,
2617 transaction_id: TransactionId,
2618 cx: &mut Context<Self>,
2619 ) -> bool {
2620 let was_dirty = self.is_dirty();
2621 let old_version = self.version.clone();
2622
2623 let operations = self.text.undo_to_transaction(transaction_id);
2624 let undone = !operations.is_empty();
2625 for operation in operations {
2626 self.send_operation(Operation::Buffer(operation), true, cx);
2627 }
2628 if undone {
2629 self.did_edit(&old_version, was_dirty, cx)
2630 }
2631 undone
2632 }
2633
2634 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2635 let was_dirty = self.is_dirty();
2636 let operation = self.text.undo_operations(counts);
2637 let old_version = self.version.clone();
2638 self.send_operation(Operation::Buffer(operation), true, cx);
2639 self.did_edit(&old_version, was_dirty, cx);
2640 }
2641
2642    /// Redoes the most recently undone transaction.
2643 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2644 let was_dirty = self.is_dirty();
2645 let old_version = self.version.clone();
2646
2647 if let Some((transaction_id, operation)) = self.text.redo() {
2648 self.send_operation(Operation::Buffer(operation), true, cx);
2649 self.did_edit(&old_version, was_dirty, cx);
2650 Some(transaction_id)
2651 } else {
2652 None
2653 }
2654 }
2655
2656    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2657 pub fn redo_to_transaction(
2658 &mut self,
2659 transaction_id: TransactionId,
2660 cx: &mut Context<Self>,
2661 ) -> bool {
2662 let was_dirty = self.is_dirty();
2663 let old_version = self.version.clone();
2664
2665 let operations = self.text.redo_to_transaction(transaction_id);
2666 let redone = !operations.is_empty();
2667 for operation in operations {
2668 self.send_operation(Operation::Buffer(operation), true, cx);
2669 }
2670 if redone {
2671 self.did_edit(&old_version, was_dirty, cx)
2672 }
2673 redone
2674 }
2675
2676    /// Overrides the completion triggers registered for the given language server with the user-provided set.
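    ///
    /// A minimal sketch, assuming a `server_id: LanguageServerId` and a
    /// `cx: &mut Context<Buffer>` (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::collections::BTreeSet;
    ///
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// // Passing an empty set removes this server's triggers instead.
    /// ```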
2677 pub fn set_completion_triggers(
2678 &mut self,
2679 server_id: LanguageServerId,
2680 triggers: BTreeSet<String>,
2681 cx: &mut Context<Self>,
2682 ) {
2683 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2684 if triggers.is_empty() {
2685 self.completion_triggers_per_language_server
2686 .remove(&server_id);
2687 self.completion_triggers = self
2688 .completion_triggers_per_language_server
2689 .values()
2690 .flat_map(|triggers| triggers.into_iter().cloned())
2691 .collect();
2692 } else {
2693 self.completion_triggers_per_language_server
2694 .insert(server_id, triggers.clone());
2695 self.completion_triggers.extend(triggers.iter().cloned());
2696 }
2697 self.send_operation(
2698 Operation::UpdateCompletionTriggers {
2699 triggers: triggers.into_iter().collect(),
2700 lamport_timestamp: self.completion_triggers_timestamp,
2701 server_id,
2702 },
2703 true,
2704 cx,
2705 );
2706 cx.notify();
2707 }
2708
2709 /// Returns a list of strings which trigger a completion menu for this language.
2710    /// Usually this is driven by an LSP server, which returns a list of trigger characters for completions.
2711 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2712 &self.completion_triggers
2713 }
2714
2715 /// Call this directly after performing edits to prevent the preview tab
2716 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2717 /// to return false until there are additional edits.
2718 pub fn refresh_preview(&mut self) {
2719 self.preview_version = self.version.clone();
2720 }
2721
2722 /// Whether we should preserve the preview status of a tab containing this buffer.
2723 pub fn preserve_preview(&self) -> bool {
2724 !self.has_edits_since(&self.preview_version)
2725 }
2726}
2727
2728#[doc(hidden)]
2729#[cfg(any(test, feature = "test-support"))]
2730impl Buffer {
2731 pub fn edit_via_marked_text(
2732 &mut self,
2733 marked_string: &str,
2734 autoindent_mode: Option<AutoindentMode>,
2735 cx: &mut Context<Self>,
2736 ) {
2737 let edits = self.edits_for_marked_text(marked_string);
2738 self.edit(edits, autoindent_mode, cx);
2739 }
2740
2741 pub fn set_group_interval(&mut self, group_interval: Duration) {
2742 self.text.set_group_interval(group_interval);
2743 }
2744
2745 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2746 where
2747 T: rand::Rng,
2748 {
2749 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2750 let mut last_end = None;
2751 for _ in 0..old_range_count {
2752 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2753 break;
2754 }
2755
2756 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2757 let mut range = self.random_byte_range(new_start, rng);
2758 if rng.gen_bool(0.2) {
2759 mem::swap(&mut range.start, &mut range.end);
2760 }
2761 last_end = Some(range.end);
2762
2763 let new_text_len = rng.gen_range(0..10);
2764 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2765 new_text = new_text.to_uppercase();
2766
2767 edits.push((range, new_text));
2768 }
2769 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2770 self.edit(edits, None, cx);
2771 }
2772
2773 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2774 let was_dirty = self.is_dirty();
2775 let old_version = self.version.clone();
2776
2777 let ops = self.text.randomly_undo_redo(rng);
2778 if !ops.is_empty() {
2779 for op in ops {
2780 self.send_operation(Operation::Buffer(op), true, cx);
2781 self.did_edit(&old_version, was_dirty, cx);
2782 }
2783 }
2784 }
2785}
2786
2787impl EventEmitter<BufferEvent> for Buffer {}
2788
2789impl Deref for Buffer {
2790 type Target = TextBuffer;
2791
2792 fn deref(&self) -> &Self::Target {
2793 &self.text
2794 }
2795}
2796
2797impl BufferSnapshot {
2798 /// Returns [`IndentSize`] for a given line that respects user settings and
2799 /// language preferences.
2800 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2801 indent_size_for_line(self, row)
2802 }
2803
2804 /// Returns [`IndentSize`] for a given position that respects user settings
2805 /// and language preferences.
2806 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2807 let settings = language_settings(
2808 self.language_at(position).map(|l| l.name()),
2809 self.file(),
2810 cx,
2811 );
2812 if settings.hard_tabs {
2813 IndentSize::tab()
2814 } else {
2815 IndentSize::spaces(settings.tab_size.get())
2816 }
2817 }
2818
2819 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2820 /// is passed in as `single_indent_size`.
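    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Suggested indent sizes for rows 1 through 3, using 4-space indent units.
    /// let suggestions = snapshot.suggested_indents(1..4, IndentSize::spaces(4));
    /// for (row, indent) in &suggestions {
    ///     // `indent.len` columns of `indent.kind` are suggested for `row`.
    /// }
    /// ```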
2821 pub fn suggested_indents(
2822 &self,
2823 rows: impl Iterator<Item = u32>,
2824 single_indent_size: IndentSize,
2825 ) -> BTreeMap<u32, IndentSize> {
2826 let mut result = BTreeMap::new();
2827
2828 for row_range in contiguous_ranges(rows, 10) {
2829 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2830 Some(suggestions) => suggestions,
2831 _ => break,
2832 };
2833
2834 for (row, suggestion) in row_range.zip(suggestions) {
2835 let indent_size = if let Some(suggestion) = suggestion {
2836 result
2837 .get(&suggestion.basis_row)
2838 .copied()
2839 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2840 .with_delta(suggestion.delta, single_indent_size)
2841 } else {
2842 self.indent_size_for_line(row)
2843 };
2844
2845 result.insert(row, indent_size);
2846 }
2847 }
2848
2849 result
2850 }
2851
2852 fn suggest_autoindents(
2853 &self,
2854 row_range: Range<u32>,
2855 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2856 let config = &self.language.as_ref()?.config;
2857 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2858 let significant_indentation = config.significant_indentation;
2859
2860 // Find the suggested indentation ranges based on the syntax tree.
2861 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2862 let end = Point::new(row_range.end, 0);
2863 let range = (start..end).to_offset(&self.text);
2864 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2865 Some(&grammar.indents_config.as_ref()?.query)
2866 });
2867 let indent_configs = matches
2868 .grammars()
2869 .iter()
2870 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2871 .collect::<Vec<_>>();
2872
2873 let mut indent_ranges = Vec::<Range<Point>>::new();
2874 let mut outdent_positions = Vec::<Point>::new();
2875 while let Some(mat) = matches.peek() {
2876 let mut start: Option<Point> = None;
2877 let mut end: Option<Point> = None;
2878 let mut outdent: Option<Point> = None;
2879
2880 let config = &indent_configs[mat.grammar_index];
2881 for capture in mat.captures {
2882 if capture.index == config.indent_capture_ix {
2883 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2884 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2885 } else if Some(capture.index) == config.start_capture_ix {
2886 start = Some(Point::from_ts_point(capture.node.end_position()));
2887 } else if Some(capture.index) == config.end_capture_ix {
2888 end = Some(Point::from_ts_point(capture.node.start_position()));
2889 } else if Some(capture.index) == config.outdent_capture_ix {
2890 let point = Point::from_ts_point(capture.node.start_position());
2891 outdent.get_or_insert(point);
2892 outdent_positions.push(point);
2893 }
2894 }
2895
2896 matches.advance();
2897            // In case of significant indentation, expand the end to the outdent position.
2898 let end = if significant_indentation {
2899 outdent.or(end)
2900 } else {
2901 end
2902 };
2903 if let Some((start, end)) = start.zip(end) {
2904 if start.row == end.row && (!significant_indentation || start.column < end.column) {
2905 continue;
2906 }
2907 let range = start..end;
2908 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2909 Err(ix) => indent_ranges.insert(ix, range),
2910 Ok(ix) => {
2911 let prev_range = &mut indent_ranges[ix];
2912 prev_range.end = prev_range.end.max(range.end);
2913 }
2914 }
2915 }
2916 }
2917
2918 let mut error_ranges = Vec::<Range<Point>>::new();
2919 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2920 grammar.error_query.as_ref()
2921 });
2922 while let Some(mat) = matches.peek() {
2923 let node = mat.captures[0].node;
2924 let start = Point::from_ts_point(node.start_position());
2925 let end = Point::from_ts_point(node.end_position());
2926 let range = start..end;
2927 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2928 Ok(ix) | Err(ix) => ix,
2929 };
2930 let mut end_ix = ix;
2931 while let Some(existing_range) = error_ranges.get(end_ix) {
2932 if existing_range.end < end {
2933 end_ix += 1;
2934 } else {
2935 break;
2936 }
2937 }
2938 error_ranges.splice(ix..end_ix, [range]);
2939 matches.advance();
2940 }
2941
2942        // With significant indentation, outdent positions are not used to truncate
2943        // indent ranges; they are used to expand them instead (handled above).
2944 if !significant_indentation {
2945 outdent_positions.sort();
2946 for outdent_position in outdent_positions {
2947                // Find the innermost indent range containing this outdent position,
2948                // and set its end to the outdent position.
2949 if let Some(range_to_truncate) = indent_ranges
2950 .iter_mut()
2951 .filter(|indent_range| indent_range.contains(&outdent_position))
2952 .next_back()
2953 {
2954 range_to_truncate.end = outdent_position;
2955 }
2956 }
2957 }
2958
2959        // Find the suggested indentation increases and decreases based on regexes.
2960 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2961 self.for_each_line(
2962 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2963 ..Point::new(row_range.end, 0),
2964 |row, line| {
2965 if config
2966 .decrease_indent_pattern
2967 .as_ref()
2968 .map_or(false, |regex| regex.is_match(line))
2969 {
2970 indent_change_rows.push((row, Ordering::Less));
2971 }
2972 if config
2973 .increase_indent_pattern
2974 .as_ref()
2975 .map_or(false, |regex| regex.is_match(line))
2976 {
2977 indent_change_rows.push((row + 1, Ordering::Greater));
2978 }
2979 },
2980 );
2981
2982 let mut indent_changes = indent_change_rows.into_iter().peekable();
2983 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2984 prev_non_blank_row.unwrap_or(0)
2985 } else {
2986 row_range.start.saturating_sub(1)
2987 };
2988 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2989 Some(row_range.map(move |row| {
2990 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2991
2992 let mut indent_from_prev_row = false;
2993 let mut outdent_from_prev_row = false;
2994 let mut outdent_to_row = u32::MAX;
2995 let mut from_regex = false;
2996
2997 while let Some((indent_row, delta)) = indent_changes.peek() {
2998 match indent_row.cmp(&row) {
2999 Ordering::Equal => match delta {
3000 Ordering::Less => {
3001 from_regex = true;
3002 outdent_from_prev_row = true
3003 }
3004 Ordering::Greater => {
3005 indent_from_prev_row = true;
3006 from_regex = true
3007 }
3008 _ => {}
3009 },
3010
3011 Ordering::Greater => break,
3012 Ordering::Less => {}
3013 }
3014
3015 indent_changes.next();
3016 }
3017
3018 for range in &indent_ranges {
3019 if range.start.row >= row {
3020 break;
3021 }
3022 if range.start.row == prev_row && range.end > row_start {
3023 indent_from_prev_row = true;
3024 }
3025 if significant_indentation && self.is_line_blank(row) && range.start.row == prev_row
3026 {
3027 indent_from_prev_row = true;
3028 }
3029 if !significant_indentation || !self.is_line_blank(row) {
3030 if range.end > prev_row_start && range.end <= row_start {
3031 outdent_to_row = outdent_to_row.min(range.start.row);
3032 }
3033 }
3034 }
3035
3036 let within_error = error_ranges
3037 .iter()
3038 .any(|e| e.start.row < row && e.end > row_start);
3039
3040 let suggestion = if outdent_to_row == prev_row
3041 || (outdent_from_prev_row && indent_from_prev_row)
3042 {
3043 Some(IndentSuggestion {
3044 basis_row: prev_row,
3045 delta: Ordering::Equal,
3046 within_error: within_error && !from_regex,
3047 })
3048 } else if indent_from_prev_row {
3049 Some(IndentSuggestion {
3050 basis_row: prev_row,
3051 delta: Ordering::Greater,
3052 within_error: within_error && !from_regex,
3053 })
3054 } else if outdent_to_row < prev_row {
3055 Some(IndentSuggestion {
3056 basis_row: outdent_to_row,
3057 delta: Ordering::Equal,
3058 within_error: within_error && !from_regex,
3059 })
3060 } else if outdent_from_prev_row {
3061 Some(IndentSuggestion {
3062 basis_row: prev_row,
3063 delta: Ordering::Less,
3064 within_error: within_error && !from_regex,
3065 })
3066 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3067 {
3068 Some(IndentSuggestion {
3069 basis_row: prev_row,
3070 delta: Ordering::Equal,
3071 within_error: within_error && !from_regex,
3072 })
3073 } else {
3074 None
3075 };
3076
3077 prev_row = row;
3078 prev_row_start = row_start;
3079 suggestion
3080 }))
3081 }
3082
3083 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3084 while row > 0 {
3085 row -= 1;
3086 if !self.is_line_blank(row) {
3087 return Some(row);
3088 }
3089 }
3090 None
3091 }
3092
3093 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3094 let captures = self.syntax.captures(range, &self.text, |grammar| {
3095 grammar.highlights_query.as_ref()
3096 });
3097 let highlight_maps = captures
3098 .grammars()
3099 .iter()
3100 .map(|grammar| grammar.highlight_map())
3101 .collect();
3102 (captures, highlight_maps)
3103 }
3104
3105 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3106 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3107 /// returned in chunks where each chunk has a single syntax highlighting style and
3108 /// diagnostic status.
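    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// // Reassemble the highlighted text of the first line.
    /// let end = snapshot.line_len(0) as usize;
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..end, true) {
    ///     // Each chunk carries a uniform highlight and diagnostic status.
    ///     text.push_str(chunk.text);
    /// }
    /// ```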
3109 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3110 let range = range.start.to_offset(self)..range.end.to_offset(self);
3111
3112 let mut syntax = None;
3113 if language_aware {
3114 syntax = Some(self.get_highlights(range.clone()));
3115 }
3116 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3117 let diagnostics = language_aware;
3118 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3119 }
3120
3121 pub fn highlighted_text_for_range<T: ToOffset>(
3122 &self,
3123 range: Range<T>,
3124 override_style: Option<HighlightStyle>,
3125 syntax_theme: &SyntaxTheme,
3126 ) -> HighlightedText {
3127 HighlightedText::from_buffer_range(
3128 range,
3129 &self.text,
3130 &self.syntax,
3131 override_style,
3132 syntax_theme,
3133 )
3134 }
3135
3136 /// Invokes the given callback for each line of text in the given range of the buffer.
3137 /// Uses callback to avoid allocating a string for each line.
3138 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3139 let mut line = String::new();
3140 let mut row = range.start.row;
3141 for chunk in self
3142 .as_rope()
3143 .chunks_in_range(range.to_offset(self))
3144 .chain(["\n"])
3145 {
3146 for (newline_ix, text) in chunk.split('\n').enumerate() {
3147 if newline_ix > 0 {
3148 callback(row, &line);
3149 row += 1;
3150 line.clear();
3151 }
3152 line.push_str(text);
3153 }
3154 }
3155 }
3156
3157 /// Iterates over every [`SyntaxLayer`] in the buffer.
3158 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3159 self.syntax
3160 .layers_for_range(0..self.len(), &self.text, true)
3161 }
3162
3163 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3164 let offset = position.to_offset(self);
3165 self.syntax
3166 .layers_for_range(offset..offset, &self.text, false)
3167 .filter(|l| l.node().end_byte() > offset)
3168 .last()
3169 }
3170
3171 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3172 &self,
3173 range: Range<D>,
3174 ) -> Option<SyntaxLayer> {
3175 let range = range.to_offset(self);
3176 return self
3177 .syntax
3178 .layers_for_range(range, &self.text, false)
3179 .max_by(|a, b| {
3180 if a.depth != b.depth {
3181 a.depth.cmp(&b.depth)
3182 } else if a.offset.0 != b.offset.0 {
3183 a.offset.0.cmp(&b.offset.0)
3184 } else {
3185 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3186 }
3187 });
3188 }
3189
3190 /// Returns the main [`Language`].
3191 pub fn language(&self) -> Option<&Arc<Language>> {
3192 self.language.as_ref()
3193 }
3194
3195 /// Returns the [`Language`] at the given location.
3196 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3197 self.syntax_layer_at(position)
3198 .map(|info| info.language)
3199 .or(self.language.as_ref())
3200 }
3201
3202 /// Returns the settings for the language at the given location.
3203 pub fn settings_at<'a, D: ToOffset>(
3204 &'a self,
3205 position: D,
3206 cx: &'a App,
3207 ) -> Cow<'a, LanguageSettings> {
3208 language_settings(
3209 self.language_at(position).map(|l| l.name()),
3210 self.file.as_ref(),
3211 cx,
3212 )
3213 }
3214
3215 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3216 CharClassifier::new(self.language_scope_at(point))
3217 }
3218
3219 /// Returns the [`LanguageScope`] at the given location.
3220 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3221 let offset = position.to_offset(self);
3222 let mut scope = None;
3223 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3224
3225 // Use the layer that has the smallest node intersecting the given point.
3226 for layer in self
3227 .syntax
3228 .layers_for_range(offset..offset, &self.text, false)
3229 {
3230 let mut cursor = layer.node().walk();
3231
3232 let mut range = None;
3233 loop {
3234 let child_range = cursor.node().byte_range();
3235 if !child_range.contains(&offset) {
3236 break;
3237 }
3238
3239 range = Some(child_range);
3240 if cursor.goto_first_child_for_byte(offset).is_none() {
3241 break;
3242 }
3243 }
3244
3245 if let Some(range) = range {
3246 if smallest_range_and_depth.as_ref().map_or(
3247 true,
3248 |(smallest_range, smallest_range_depth)| {
3249 if layer.depth > *smallest_range_depth {
3250 true
3251 } else if layer.depth == *smallest_range_depth {
3252 range.len() < smallest_range.len()
3253 } else {
3254 false
3255 }
3256 },
3257 ) {
3258 smallest_range_and_depth = Some((range, layer.depth));
3259 scope = Some(LanguageScope {
3260 language: layer.language.clone(),
3261 override_id: layer.override_id(offset, &self.text),
3262 });
3263 }
3264 }
3265 }
3266
3267 scope.or_else(|| {
3268 self.language.clone().map(|language| LanguageScope {
3269 language,
3270 override_id: None,
3271 })
3272 })
3273 }
3274
3275 /// Returns a tuple of the range and character kind of the word
3276 /// surrounding the given position.
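    ///
    /// A minimal sketch, assuming the `text_for_range` helper from the underlying
    /// text snapshot (not compiled as a doctest):
    ///
    /// ```ignore
    /// let (range, kind) = snapshot.surrounding_word(cursor_offset);
    /// let word: String = snapshot.text_for_range(range).collect();
    /// ```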
3277 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3278 let mut start = start.to_offset(self);
3279 let mut end = start;
3280 let mut next_chars = self.chars_at(start).peekable();
3281 let mut prev_chars = self.reversed_chars_at(start).peekable();
3282
3283 let classifier = self.char_classifier_at(start);
3284 let word_kind = cmp::max(
3285 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3286 next_chars.peek().copied().map(|c| classifier.kind(c)),
3287 );
3288
3289 for ch in prev_chars {
3290 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3291 start -= ch.len_utf8();
3292 } else {
3293 break;
3294 }
3295 }
3296
3297 for ch in next_chars {
3298 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3299 end += ch.len_utf8();
3300 } else {
3301 break;
3302 }
3303 }
3304
3305 (start..end, word_kind)
3306 }
3307
3308 /// Returns the closest syntax node enclosing the given range.
3309 pub fn syntax_ancestor<'a, T: ToOffset>(
3310 &'a self,
3311 range: Range<T>,
3312 ) -> Option<tree_sitter::Node<'a>> {
3313 let range = range.start.to_offset(self)..range.end.to_offset(self);
3314 let mut result: Option<tree_sitter::Node<'a>> = None;
3315 'outer: for layer in self
3316 .syntax
3317 .layers_for_range(range.clone(), &self.text, true)
3318 {
3319 let mut cursor = layer.node().walk();
3320
3321 // Descend to the first leaf that touches the start of the range.
3322 //
3323 // If the range is non-empty and the current node ends exactly at the start,
3324 // move to the next sibling to find a node that extends beyond the start.
3325 //
3326 // If the range is empty and the current node starts after the range position,
3327 // move to the previous sibling to find the node that contains the position.
3328 while cursor.goto_first_child_for_byte(range.start).is_some() {
3329 if !range.is_empty() && cursor.node().end_byte() == range.start {
3330 cursor.goto_next_sibling();
3331 }
3332 if range.is_empty() && cursor.node().start_byte() > range.start {
3333 cursor.goto_previous_sibling();
3334 }
3335 }
3336
3337 // Ascend to the smallest ancestor that strictly contains the range.
3338 loop {
3339 let node_range = cursor.node().byte_range();
3340 if node_range.start <= range.start
3341 && node_range.end >= range.end
3342 && node_range.len() > range.len()
3343 {
3344 break;
3345 }
3346 if !cursor.goto_parent() {
3347 continue 'outer;
3348 }
3349 }
3350
3351 let left_node = cursor.node();
3352 let mut layer_result = left_node;
3353
3354 // For an empty range, try to find another node immediately to the right of the range.
3355 if left_node.end_byte() == range.start {
3356 let mut right_node = None;
3357 while !cursor.goto_next_sibling() {
3358 if !cursor.goto_parent() {
3359 break;
3360 }
3361 }
3362
3363 while cursor.node().start_byte() == range.start {
3364 right_node = Some(cursor.node());
3365 if !cursor.goto_first_child() {
3366 break;
3367 }
3368 }
3369
3370 // If there is a candidate node on both sides of the (empty) range, then
3371 // decide between the two by favoring a named node over an anonymous token.
3372 // If both nodes are the same in that regard, favor the right one.
3373 if let Some(right_node) = right_node {
3374 if right_node.is_named() || !left_node.is_named() {
3375 layer_result = right_node;
3376 }
3377 }
3378 }
3379
3380 if let Some(previous_result) = &result {
3381 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3382 continue;
3383 }
3384 }
3385 result = Some(layer_result);
3386 }
3387
3388 result
3389 }
3390
3391    /// Returns the root syntax node within the given row.
3392 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node> {
3393 let start_offset = position.to_offset(self);
3394
3395 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3396
3397 let layer = self
3398 .syntax
3399 .layers_for_range(start_offset..start_offset, &self.text, true)
3400 .next()?;
3401
3402 let mut cursor = layer.node().walk();
3403
3404 // Descend to the first leaf that touches the start of the range.
3405 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3406 if cursor.node().end_byte() == start_offset {
3407 cursor.goto_next_sibling();
3408 }
3409 }
3410
3411 // Ascend to the root node within the same row.
3412 while cursor.goto_parent() {
3413 if cursor.node().start_position().row != row {
3414 break;
3415 }
3416 }
3417
3418 return Some(cursor.node());
3419 }
3420
3421 /// Returns the outline for the buffer.
3422 ///
3423 /// This method allows passing an optional [`SyntaxTheme`] to
3424 /// syntax-highlight the returned symbols.
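    ///
    /// A minimal sketch, assuming `Outline` exposes its items (not compiled as a
    /// doctest):
    ///
    /// ```ignore
    /// if let Some(outline) = snapshot.outline(None) {
    ///     for item in &outline.items {
    ///         // `item.depth` reflects nesting; `item.text` is the displayed label.
    ///     }
    /// }
    /// ```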
3425 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3426 self.outline_items_containing(0..self.len(), true, theme)
3427 .map(Outline::new)
3428 }
3429
3430 /// Returns all the symbols that contain the given position.
3431 ///
3432 /// This method allows passing an optional [`SyntaxTheme`] to
3433 /// syntax-highlight the returned symbols.
3434 pub fn symbols_containing<T: ToOffset>(
3435 &self,
3436 position: T,
3437 theme: Option<&SyntaxTheme>,
3438 ) -> Option<Vec<OutlineItem<Anchor>>> {
3439 let position = position.to_offset(self);
3440 let mut items = self.outline_items_containing(
3441 position.saturating_sub(1)..self.len().min(position + 1),
3442 false,
3443 theme,
3444 )?;
3445 let mut prev_depth = None;
3446 items.retain(|item| {
3447 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3448 prev_depth = Some(item.depth);
3449 result
3450 });
3451 Some(items)
3452 }
3453
3454 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3455 let range = range.to_offset(self);
3456 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3457 grammar.outline_config.as_ref().map(|c| &c.query)
3458 });
3459 let configs = matches
3460 .grammars()
3461 .iter()
3462 .map(|g| g.outline_config.as_ref().unwrap())
3463 .collect::<Vec<_>>();
3464
3465 while let Some(mat) = matches.peek() {
3466 let config = &configs[mat.grammar_index];
3467 let containing_item_node = maybe!({
3468 let item_node = mat.captures.iter().find_map(|cap| {
3469 if cap.index == config.item_capture_ix {
3470 Some(cap.node)
3471 } else {
3472 None
3473 }
3474 })?;
3475
3476 let item_byte_range = item_node.byte_range();
3477 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3478 None
3479 } else {
3480 Some(item_node)
3481 }
3482 });
3483
3484 if let Some(item_node) = containing_item_node {
3485 return Some(
3486 Point::from_ts_point(item_node.start_position())
3487 ..Point::from_ts_point(item_node.end_position()),
3488 );
3489 }
3490
3491 matches.advance();
3492 }
3493 None
3494 }
3495
3496 pub fn outline_items_containing<T: ToOffset>(
3497 &self,
3498 range: Range<T>,
3499 include_extra_context: bool,
3500 theme: Option<&SyntaxTheme>,
3501 ) -> Option<Vec<OutlineItem<Anchor>>> {
3502 let range = range.to_offset(self);
3503 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3504 grammar.outline_config.as_ref().map(|c| &c.query)
3505 });
3506 let configs = matches
3507 .grammars()
3508 .iter()
3509 .map(|g| g.outline_config.as_ref().unwrap())
3510 .collect::<Vec<_>>();
3511
3512 let mut items = Vec::new();
3513 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3514 while let Some(mat) = matches.peek() {
3515 let config = &configs[mat.grammar_index];
3516 if let Some(item) =
3517 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3518 {
3519 items.push(item);
3520 } else if let Some(capture) = mat
3521 .captures
3522 .iter()
3523 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3524 {
3525 let capture_range = capture.node.start_position()..capture.node.end_position();
3526 let mut capture_row_range =
3527 capture_range.start.row as u32..capture_range.end.row as u32;
3528 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3529 {
3530 capture_row_range.end -= 1;
3531 }
3532 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3533 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3534 last_row_range.end = capture_row_range.end;
3535 } else {
3536 annotation_row_ranges.push(capture_row_range);
3537 }
3538 } else {
3539 annotation_row_ranges.push(capture_row_range);
3540 }
3541 }
3542 matches.advance();
3543 }
3544
3545 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3546
3547 // Assign depths based on containment relationships and convert to anchors.
3548 let mut item_ends_stack = Vec::<Point>::new();
3549 let mut anchor_items = Vec::new();
3550 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3551 for item in items {
3552 while let Some(last_end) = item_ends_stack.last().copied() {
3553 if last_end < item.range.end {
3554 item_ends_stack.pop();
3555 } else {
3556 break;
3557 }
3558 }
3559
3560 let mut annotation_row_range = None;
3561 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3562 let row_preceding_item = item.range.start.row.saturating_sub(1);
3563 if next_annotation_row_range.end < row_preceding_item {
3564 annotation_row_ranges.next();
3565 } else {
3566 if next_annotation_row_range.end == row_preceding_item {
3567 annotation_row_range = Some(next_annotation_row_range.clone());
3568 annotation_row_ranges.next();
3569 }
3570 break;
3571 }
3572 }
3573
3574 anchor_items.push(OutlineItem {
3575 depth: item_ends_stack.len(),
3576 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3577 text: item.text,
3578 highlight_ranges: item.highlight_ranges,
3579 name_ranges: item.name_ranges,
3580 body_range: item.body_range.map(|body_range| {
3581 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3582 }),
3583 annotation_range: annotation_row_range.map(|annotation_range| {
3584 self.anchor_after(Point::new(annotation_range.start, 0))
3585 ..self.anchor_before(Point::new(
3586 annotation_range.end,
3587 self.line_len(annotation_range.end),
3588 ))
3589 }),
3590 });
3591 item_ends_stack.push(item.range.end);
3592 }
3593
3594 Some(anchor_items)
3595 }
3596
3597 fn next_outline_item(
3598 &self,
3599 config: &OutlineConfig,
3600 mat: &SyntaxMapMatch,
3601 range: &Range<usize>,
3602 include_extra_context: bool,
3603 theme: Option<&SyntaxTheme>,
3604 ) -> Option<OutlineItem<Point>> {
3605 let item_node = mat.captures.iter().find_map(|cap| {
3606 if cap.index == config.item_capture_ix {
3607 Some(cap.node)
3608 } else {
3609 None
3610 }
3611 })?;
3612
3613 let item_byte_range = item_node.byte_range();
3614 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3615 return None;
3616 }
3617 let item_point_range = Point::from_ts_point(item_node.start_position())
3618 ..Point::from_ts_point(item_node.end_position());
3619
3620 let mut open_point = None;
3621 let mut close_point = None;
3622 let mut buffer_ranges = Vec::new();
3623 for capture in mat.captures {
3624 let node_is_name;
3625 if capture.index == config.name_capture_ix {
3626 node_is_name = true;
3627 } else if Some(capture.index) == config.context_capture_ix
3628 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3629 {
3630 node_is_name = false;
3631 } else {
3632 if Some(capture.index) == config.open_capture_ix {
3633 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3634 } else if Some(capture.index) == config.close_capture_ix {
3635 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3636 }
3637
3638 continue;
3639 }
3640
3641 let mut range = capture.node.start_byte()..capture.node.end_byte();
3642 let start = capture.node.start_position();
3643 if capture.node.end_position().row > start.row {
3644 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3645 }
3646
3647 if !range.is_empty() {
3648 buffer_ranges.push((range, node_is_name));
3649 }
3650 }
3651 if buffer_ranges.is_empty() {
3652 return None;
3653 }
3654 let mut text = String::new();
3655 let mut highlight_ranges = Vec::new();
3656 let mut name_ranges = Vec::new();
3657 let mut chunks = self.chunks(
3658 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3659 true,
3660 );
3661 let mut last_buffer_range_end = 0;
3662
3663 for (buffer_range, is_name) in buffer_ranges {
3664 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3665 if space_added {
3666 text.push(' ');
3667 }
3668 let before_append_len = text.len();
3669 let mut offset = buffer_range.start;
3670 chunks.seek(buffer_range.clone());
3671 for mut chunk in chunks.by_ref() {
3672 if chunk.text.len() > buffer_range.end - offset {
3673 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3674 offset = buffer_range.end;
3675 } else {
3676 offset += chunk.text.len();
3677 }
3678 let style = chunk
3679 .syntax_highlight_id
3680 .zip(theme)
3681 .and_then(|(highlight, theme)| highlight.style(theme));
3682 if let Some(style) = style {
3683 let start = text.len();
3684 let end = start + chunk.text.len();
3685 highlight_ranges.push((start..end, style));
3686 }
3687 text.push_str(chunk.text);
3688 if offset >= buffer_range.end {
3689 break;
3690 }
3691 }
3692 if is_name {
3693 let after_append_len = text.len();
3694 let start = if space_added && !name_ranges.is_empty() {
3695 before_append_len - 1
3696 } else {
3697 before_append_len
3698 };
3699 name_ranges.push(start..after_append_len);
3700 }
3701 last_buffer_range_end = buffer_range.end;
3702 }
3703
3704 Some(OutlineItem {
3705 depth: 0, // We'll calculate the depth later
3706 range: item_point_range,
3707 text,
3708 highlight_ranges,
3709 name_ranges,
3710 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3711 annotation_range: None,
3712 })
3713 }
3714
3715 pub fn function_body_fold_ranges<T: ToOffset>(
3716 &self,
3717 within: Range<T>,
3718 ) -> impl Iterator<Item = Range<usize>> + '_ {
3719 self.text_object_ranges(within, TreeSitterOptions::default())
3720 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3721 }
3722
3723 /// For each grammar in the language, runs the provided
3724 /// [`tree_sitter::Query`] against the given range.
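    ///
    /// A sketch (not compiled as a doctest) that selects each grammar's outline query,
    /// mirroring how this module uses the syntax map; `snapshot` and a byte `range` are assumed:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(range, |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` for the current match here.
    ///     matches.advance();
    /// }
    /// ```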
3725 pub fn matches(
3726 &self,
3727 range: Range<usize>,
3728 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3729 ) -> SyntaxMapMatches {
3730 self.syntax.matches(range, self, query)
3731 }
3732
3733 pub fn all_bracket_ranges(
3734 &self,
3735 range: Range<usize>,
3736 ) -> impl Iterator<Item = BracketMatch> + '_ {
3737 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3738 grammar.brackets_config.as_ref().map(|c| &c.query)
3739 });
3740 let configs = matches
3741 .grammars()
3742 .iter()
3743 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3744 .collect::<Vec<_>>();
3745
3746 iter::from_fn(move || {
3747 while let Some(mat) = matches.peek() {
3748 let mut open = None;
3749 let mut close = None;
3750 let config = &configs[mat.grammar_index];
3751 let pattern = &config.patterns[mat.pattern_index];
3752 for capture in mat.captures {
3753 if capture.index == config.open_capture_ix {
3754 open = Some(capture.node.byte_range());
3755 } else if capture.index == config.close_capture_ix {
3756 close = Some(capture.node.byte_range());
3757 }
3758 }
3759
3760 matches.advance();
3761
3762 let Some((open_range, close_range)) = open.zip(close) else {
3763 continue;
3764 };
3765
3766 let bracket_range = open_range.start..=close_range.end;
3767 if !bracket_range.overlaps(&range) {
3768 continue;
3769 }
3770
3771 return Some(BracketMatch {
3772 open_range,
3773 close_range,
3774 newline_only: pattern.newline_only,
3775 });
3776 }
3777 None
3778 })
3779 }
3780
    /// Returns bracket range pairs overlapping or adjacent to `range`.
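    ///
    /// Illustrative sketch (not compiled as a doctest); `snapshot` and `range` are assumed:
    ///
    /// ```ignore
    /// for pair in snapshot.bracket_ranges(range) {
    ///     // `open_range` and `close_range` are byte ranges of the opening and closing brackets.
    ///     let _ = (pair.open_range, pair.close_range);
    /// }
    /// ```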
3782 pub fn bracket_ranges<T: ToOffset>(
3783 &self,
3784 range: Range<T>,
3785 ) -> impl Iterator<Item = BracketMatch> + '_ {
        // Expand the range by one character on each side so that bracket pairs
        // adjacent to the range are found as well.
3787 let range = range.start.to_offset(self).saturating_sub(1)
3788 ..self.len().min(range.end.to_offset(self) + 1);
3789 self.all_bracket_ranges(range)
3790 .filter(|pair| !pair.newline_only)
3791 }
3792
3793 pub fn text_object_ranges<T: ToOffset>(
3794 &self,
3795 range: Range<T>,
3796 options: TreeSitterOptions,
3797 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3798 let range = range.start.to_offset(self).saturating_sub(1)
3799 ..self.len().min(range.end.to_offset(self) + 1);
3800
3801 let mut matches =
3802 self.syntax
3803 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3804 grammar.text_object_config.as_ref().map(|c| &c.query)
3805 });
3806
3807 let configs = matches
3808 .grammars()
3809 .iter()
3810 .map(|grammar| grammar.text_object_config.as_ref())
3811 .collect::<Vec<_>>();
3812
3813 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3814
3815 iter::from_fn(move || {
3816 loop {
3817 while let Some(capture) = captures.pop() {
3818 if capture.0.overlaps(&range) {
3819 return Some(capture);
3820 }
3821 }
3822
3823 let mat = matches.peek()?;
3824
3825 let Some(config) = configs[mat.grammar_index].as_ref() else {
3826 matches.advance();
3827 continue;
3828 };
3829
3830 for capture in mat.captures {
3831 let Some(ix) = config
3832 .text_objects_by_capture_ix
3833 .binary_search_by_key(&capture.index, |e| e.0)
3834 .ok()
3835 else {
3836 continue;
3837 };
3838 let text_object = config.text_objects_by_capture_ix[ix].1;
3839 let byte_range = capture.node.byte_range();
3840
3841 let mut found = false;
3842 for (range, existing) in captures.iter_mut() {
3843 if existing == &text_object {
3844 range.start = range.start.min(byte_range.start);
3845 range.end = range.end.max(byte_range.end);
3846 found = true;
3847 break;
3848 }
3849 }
3850
3851 if !found {
3852 captures.push((byte_range, text_object));
3853 }
3854 }
3855
3856 matches.advance();
3857 }
3858 })
3859 }
3860
    /// Returns enclosing bracket ranges containing the given range.
3862 pub fn enclosing_bracket_ranges<T: ToOffset>(
3863 &self,
3864 range: Range<T>,
3865 ) -> impl Iterator<Item = BracketMatch> + '_ {
3866 let range = range.start.to_offset(self)..range.end.to_offset(self);
3867
3868 self.bracket_ranges(range.clone()).filter(move |pair| {
3869 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3870 })
3871 }
3872
    /// Returns the smallest enclosing bracket ranges containing the given range,
    /// or `None` if no brackets contain the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
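    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` and `selection_range` are
    /// assumed, and the filter shown here accepts every bracket pair:
    ///
    /// ```ignore
    /// let accept_all: &dyn Fn(Range<usize>, Range<usize>) -> bool = &|_, _| true;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(selection_range, Some(accept_all))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost enclosing bracket pair.
    /// }
    /// ```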
3876 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3877 &self,
3878 range: Range<T>,
3879 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3880 ) -> Option<(Range<usize>, Range<usize>)> {
3881 let range = range.start.to_offset(self)..range.end.to_offset(self);
3882
3883 // Get the ranges of the innermost pair of brackets.
3884 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3885
3886 for pair in self.enclosing_bracket_ranges(range.clone()) {
3887 if let Some(range_filter) = range_filter {
3888 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3889 continue;
3890 }
3891 }
3892
3893 let len = pair.close_range.end - pair.open_range.start;
3894
3895 if let Some((existing_open, existing_close)) = &result {
3896 let existing_len = existing_close.end - existing_open.start;
3897 if len > existing_len {
3898 continue;
3899 }
3900 }
3901
3902 result = Some((pair.open_range, pair.close_range));
3903 }
3904
3905 result
3906 }
3907
    /// Returns byte offset ranges for any matches of the redaction query.
3909 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3910 /// will be run on the relevant section of the buffer.
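    ///
    /// Illustrative sketch (not compiled as a doctest); `snapshot` and `range` are assumed:
    ///
    /// ```ignore
    /// let redacted: Vec<Range<usize>> = snapshot.redacted_ranges(range).collect();
    /// // Each range covers text whose contents should be obscured when rendered.
    /// ```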
3911 pub fn redacted_ranges<T: ToOffset>(
3912 &self,
3913 range: Range<T>,
3914 ) -> impl Iterator<Item = Range<usize>> + '_ {
3915 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3916 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3917 grammar
3918 .redactions_config
3919 .as_ref()
3920 .map(|config| &config.query)
3921 });
3922
3923 let configs = syntax_matches
3924 .grammars()
3925 .iter()
3926 .map(|grammar| grammar.redactions_config.as_ref())
3927 .collect::<Vec<_>>();
3928
3929 iter::from_fn(move || {
3930 let redacted_range = syntax_matches
3931 .peek()
3932 .and_then(|mat| {
3933 configs[mat.grammar_index].and_then(|config| {
3934 mat.captures
3935 .iter()
3936 .find(|capture| capture.index == config.redaction_capture_ix)
3937 })
3938 })
3939 .map(|mat| mat.node.byte_range());
3940 syntax_matches.advance();
3941 redacted_range
3942 })
3943 }
3944
3945 pub fn injections_intersecting_range<T: ToOffset>(
3946 &self,
3947 range: Range<T>,
3948 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3949 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3950
3951 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3952 grammar
3953 .injection_config
3954 .as_ref()
3955 .map(|config| &config.query)
3956 });
3957
3958 let configs = syntax_matches
3959 .grammars()
3960 .iter()
3961 .map(|grammar| grammar.injection_config.as_ref())
3962 .collect::<Vec<_>>();
3963
3964 iter::from_fn(move || {
3965 let ranges = syntax_matches.peek().and_then(|mat| {
3966 let config = &configs[mat.grammar_index]?;
3967 let content_capture_range = mat.captures.iter().find_map(|capture| {
3968 if capture.index == config.content_capture_ix {
3969 Some(capture.node.byte_range())
3970 } else {
3971 None
3972 }
3973 })?;
3974 let language = self.language_at(content_capture_range.start)?;
3975 Some((content_capture_range, language))
3976 });
3977 syntax_matches.advance();
3978 ranges
3979 })
3980 }
3981
3982 pub fn runnable_ranges(
3983 &self,
3984 offset_range: Range<usize>,
3985 ) -> impl Iterator<Item = RunnableRange> + '_ {
3986 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3987 grammar.runnable_config.as_ref().map(|config| &config.query)
3988 });
3989
3990 let test_configs = syntax_matches
3991 .grammars()
3992 .iter()
3993 .map(|grammar| grammar.runnable_config.as_ref())
3994 .collect::<Vec<_>>();
3995
3996 iter::from_fn(move || {
3997 loop {
3998 let mat = syntax_matches.peek()?;
3999
4000 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4001 let mut run_range = None;
4002 let full_range = mat.captures.iter().fold(
4003 Range {
4004 start: usize::MAX,
4005 end: 0,
4006 },
4007 |mut acc, next| {
4008 let byte_range = next.node.byte_range();
4009 if acc.start > byte_range.start {
4010 acc.start = byte_range.start;
4011 }
4012 if acc.end < byte_range.end {
4013 acc.end = byte_range.end;
4014 }
4015 acc
4016 },
4017 );
4018 if full_range.start > full_range.end {
4019 // We did not find a full spanning range of this match.
4020 return None;
4021 }
4022 let extra_captures: SmallVec<[_; 1]> =
4023 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4024 test_configs
4025 .extra_captures
4026 .get(capture.index as usize)
4027 .cloned()
4028 .and_then(|tag_name| match tag_name {
4029 RunnableCapture::Named(name) => {
4030 Some((capture.node.byte_range(), name))
4031 }
4032 RunnableCapture::Run => {
4033 let _ = run_range.insert(capture.node.byte_range());
4034 None
4035 }
4036 })
4037 }));
4038 let run_range = run_range?;
4039 let tags = test_configs
4040 .query
4041 .property_settings(mat.pattern_index)
4042 .iter()
4043 .filter_map(|property| {
4044 if *property.key == *"tag" {
4045 property
4046 .value
4047 .as_ref()
4048 .map(|value| RunnableTag(value.to_string().into()))
4049 } else {
4050 None
4051 }
4052 })
4053 .collect();
4054 let extra_captures = extra_captures
4055 .into_iter()
4056 .map(|(range, name)| {
4057 (
4058 name.to_string(),
4059 self.text_for_range(range.clone()).collect::<String>(),
4060 )
4061 })
4062 .collect();
4063 // All tags should have the same range.
4064 Some(RunnableRange {
4065 run_range,
4066 full_range,
4067 runnable: Runnable {
4068 tags,
4069 language: mat.language,
4070 buffer: self.remote_id(),
4071 },
4072 extra_captures,
4073 buffer_id: self.remote_id(),
4074 })
4075 });
4076
4077 syntax_matches.advance();
4078 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. If a match did
                    // not contain a run marker, we don't want to end this iterator early, so we
                    // simply loop around and try the next match.
4081 return test_range;
4082 }
4083 }
4084 })
4085 }
4086
    /// Returns selections for remote peers intersecting the given range. If `include_local`
    /// is true, the local replica's selections are included as well.
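    ///
    /// Illustrative sketch (not compiled as a doctest); `snapshot` and an `Anchor` range named
    /// `range` are assumed:
    ///
    /// ```ignore
    /// for (replica_id, line_mode, cursor_shape, selections) in
    ///     snapshot.selections_in_range(range, false)
    /// {
    ///     for selection in selections {
    ///         // Each `selection` is a `&Selection<Anchor>` belonging to `replica_id`.
    ///     }
    /// }
    /// ```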
4088 #[allow(clippy::type_complexity)]
4089 pub fn selections_in_range(
4090 &self,
4091 range: Range<Anchor>,
4092 include_local: bool,
4093 ) -> impl Iterator<
4094 Item = (
4095 ReplicaId,
4096 bool,
4097 CursorShape,
4098 impl Iterator<Item = &Selection<Anchor>> + '_,
4099 ),
4100 > + '_ {
4101 self.remote_selections
4102 .iter()
4103 .filter(move |(replica_id, set)| {
4104 (include_local || **replica_id != self.text.replica_id())
4105 && !set.selections.is_empty()
4106 })
4107 .map(move |(replica_id, set)| {
4108 let start_ix = match set.selections.binary_search_by(|probe| {
4109 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4110 }) {
4111 Ok(ix) | Err(ix) => ix,
4112 };
4113 let end_ix = match set.selections.binary_search_by(|probe| {
4114 probe.start.cmp(&range.end, self).then(Ordering::Less)
4115 }) {
4116 Ok(ix) | Err(ix) => ix,
4117 };
4118
4119 (
4120 *replica_id,
4121 set.line_mode,
4122 set.cursor_shape,
4123 set.selections[start_ix..end_ix].iter(),
4124 )
4125 })
4126 }
4127
    /// Returns whether the buffer contains any diagnostics.
4129 pub fn has_diagnostics(&self) -> bool {
4130 !self.diagnostics.is_empty()
4131 }
4132
4133 /// Returns all the diagnostics intersecting the given range.
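    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is assumed, and entries are
    /// resolved to `usize` offsets, mirroring how `BufferChunks` uses this method below:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     // `entry.range` is in byte offsets; `entry.diagnostic` carries severity, message, etc.
    ///     let _ = (entry.range, entry.diagnostic.severity);
    /// }
    /// ```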
4134 pub fn diagnostics_in_range<'a, T, O>(
4135 &'a self,
4136 search_range: Range<T>,
4137 reversed: bool,
4138 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4139 where
4140 T: 'a + Clone + ToOffset,
4141 O: 'a + FromAnchor,
4142 {
4143 let mut iterators: Vec<_> = self
4144 .diagnostics
4145 .iter()
4146 .map(|(_, collection)| {
4147 collection
4148 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4149 .peekable()
4150 })
4151 .collect();
4152
4153 std::iter::from_fn(move || {
4154 let (next_ix, _) = iterators
4155 .iter_mut()
4156 .enumerate()
4157 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4158 .min_by(|(_, a), (_, b)| {
4159 let cmp = a
4160 .range
4161 .start
4162 .cmp(&b.range.start, self)
4163 // when range is equal, sort by diagnostic severity
4164 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4165 // and stabilize order with group_id
4166 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4167 if reversed { cmp.reverse() } else { cmp }
4168 })?;
4169 iterators[next_ix]
4170 .next()
4171 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4172 diagnostic,
4173 range: FromAnchor::from_anchor(&range.start, self)
4174 ..FromAnchor::from_anchor(&range.end, self),
4175 })
4176 })
4177 }
4178
4179 /// Returns all the diagnostic groups associated with the given
4180 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
4182 pub fn diagnostic_groups(
4183 &self,
4184 language_server_id: Option<LanguageServerId>,
4185 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4186 let mut groups = Vec::new();
4187
4188 if let Some(language_server_id) = language_server_id {
4189 if let Ok(ix) = self
4190 .diagnostics
4191 .binary_search_by_key(&language_server_id, |e| e.0)
4192 {
4193 self.diagnostics[ix]
4194 .1
4195 .groups(language_server_id, &mut groups, self);
4196 }
4197 } else {
4198 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4199 diagnostics.groups(*language_server_id, &mut groups, self);
4200 }
4201 }
4202
4203 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4204 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4205 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4206 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4207 });
4208
4209 groups
4210 }
4211
4212 /// Returns an iterator over the diagnostics for the given group.
4213 pub fn diagnostic_group<O>(
4214 &self,
4215 group_id: usize,
4216 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4217 where
4218 O: FromAnchor + 'static,
4219 {
4220 self.diagnostics
4221 .iter()
4222 .flat_map(move |(_, set)| set.group(group_id, self))
4223 }
4224
4225 /// An integer version number that accounts for all updates besides
4226 /// the buffer's text itself (which is versioned via a version vector).
4227 pub fn non_text_state_update_count(&self) -> usize {
4228 self.non_text_state_update_count
4229 }
4230
    /// Returns a snapshot of the underlying file.
4232 pub fn file(&self) -> Option<&Arc<dyn File>> {
4233 self.file.as_ref()
4234 }
4235
4236 /// Resolves the file path (relative to the worktree root) associated with the underlying file.
4237 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4238 if let Some(file) = self.file() {
4239 if file.path().file_name().is_none() || include_root {
4240 Some(file.full_path(cx))
4241 } else {
4242 Some(file.path().to_path_buf())
4243 }
4244 } else {
4245 None
4246 }
4247 }
4248
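    /// Collects the distinct words in the given buffer range, keyed by their text.
    ///
    /// A minimal sketch (not compiled as a doctest); `snapshot` is assumed:
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     // Only keep words containing the characters "cfg", in order.
    ///     fuzzy_contents: Some("cfg"),
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// for (word, _anchor_range) in &words {
    ///     // `word` is the matched text; the value is its `Range<Anchor>` in the buffer.
    /// }
    /// ```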
4249 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4250 let query_str = query.fuzzy_contents;
4251 if query_str.map_or(false, |query| query.is_empty()) {
4252 return BTreeMap::default();
4253 }
4254
4255 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4256 language,
4257 override_id: None,
4258 }));
4259
4260 let mut query_ix = 0;
4261 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4262 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4263
4264 let mut words = BTreeMap::default();
4265 let mut current_word_start_ix = None;
4266 let mut chunk_ix = query.range.start;
4267 for chunk in self.chunks(query.range, false) {
4268 for (i, c) in chunk.text.char_indices() {
4269 let ix = chunk_ix + i;
4270 if classifier.is_word(c) {
4271 if current_word_start_ix.is_none() {
4272 current_word_start_ix = Some(ix);
4273 }
4274
4275 if let Some(query_chars) = &query_chars {
4276 if query_ix < query_len {
4277 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4278 query_ix += 1;
4279 }
4280 }
4281 }
4282 continue;
4283 } else if let Some(word_start) = current_word_start_ix.take() {
4284 if query_ix == query_len {
4285 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4286 let mut word_text = self.text_for_range(word_start..ix).peekable();
4287 let first_char = word_text
4288 .peek()
4289 .and_then(|first_chunk| first_chunk.chars().next());
                        // As a heuristic to reduce useless completions, optionally skip
                        // "words" that start with a digit.
4291 if !query.skip_digits
4292 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4293 {
4294 words.insert(word_text.collect(), word_range);
4295 }
4296 }
4297 }
4298 query_ix = 0;
4299 }
4300 chunk_ix += chunk.text.len();
4301 }
4302
4303 words
4304 }
4305}
4306
4307pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of this fuzzy query string, in order.
4309 pub fuzzy_contents: Option<&'a str>,
4310 /// Skips words that start with a digit.
4311 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4313 pub range: Range<usize>,
4314}
4315
4316fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4317 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4318}
4319
4320fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4321 let mut result = IndentSize::spaces(0);
4322 for c in text {
4323 let kind = match c {
4324 ' ' => IndentKind::Space,
4325 '\t' => IndentKind::Tab,
4326 _ => break,
4327 };
4328 if result.len == 0 {
4329 result.kind = kind;
4330 }
4331 result.len += 1;
4332 }
4333 result
4334}
4335
4336impl Clone for BufferSnapshot {
4337 fn clone(&self) -> Self {
4338 Self {
4339 text: self.text.clone(),
4340 syntax: self.syntax.clone(),
4341 file: self.file.clone(),
4342 remote_selections: self.remote_selections.clone(),
4343 diagnostics: self.diagnostics.clone(),
4344 language: self.language.clone(),
4345 non_text_state_update_count: self.non_text_state_update_count,
4346 }
4347 }
4348}
4349
4350impl Deref for BufferSnapshot {
4351 type Target = text::BufferSnapshot;
4352
4353 fn deref(&self) -> &Self::Target {
4354 &self.text
4355 }
4356}
4357
4358unsafe impl Send for BufferChunks<'_> {}
4359
4360impl<'a> BufferChunks<'a> {
4361 pub(crate) fn new(
4362 text: &'a Rope,
4363 range: Range<usize>,
4364 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4365 diagnostics: bool,
4366 buffer_snapshot: Option<&'a BufferSnapshot>,
4367 ) -> Self {
4368 let mut highlights = None;
4369 if let Some((captures, highlight_maps)) = syntax {
4370 highlights = Some(BufferChunkHighlights {
4371 captures,
4372 next_capture: None,
4373 stack: Default::default(),
4374 highlight_maps,
4375 })
4376 }
4377
4378 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4379 let chunks = text.chunks_in_range(range.clone());
4380
4381 let mut this = BufferChunks {
4382 range,
4383 buffer_snapshot,
4384 chunks,
4385 diagnostic_endpoints,
4386 error_depth: 0,
4387 warning_depth: 0,
4388 information_depth: 0,
4389 hint_depth: 0,
4390 unnecessary_depth: 0,
4391 highlights,
4392 };
4393 this.initialize_diagnostic_endpoints();
4394 this
4395 }
4396
    /// Seeks to the given byte range in the buffer.
4398 pub fn seek(&mut self, range: Range<usize>) {
4399 let old_range = std::mem::replace(&mut self.range, range.clone());
4400 self.chunks.set_range(self.range.clone());
4401 if let Some(highlights) = self.highlights.as_mut() {
4402 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4403 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4404 highlights
4405 .stack
4406 .retain(|(end_offset, _)| *end_offset > range.start);
4407 if let Some(capture) = &highlights.next_capture {
4408 if range.start >= capture.node.start_byte() {
4409 let next_capture_end = capture.node.end_byte();
4410 if range.start < next_capture_end {
4411 highlights.stack.push((
4412 next_capture_end,
4413 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4414 ));
4415 }
4416 highlights.next_capture.take();
4417 }
4418 }
4419 } else if let Some(snapshot) = self.buffer_snapshot {
4420 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4421 *highlights = BufferChunkHighlights {
4422 captures,
4423 next_capture: None,
4424 stack: Default::default(),
4425 highlight_maps,
4426 };
4427 } else {
4428 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4429 // Seeking such BufferChunks is not supported.
4430 debug_assert!(
4431 false,
4432 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4433 );
4434 }
4435
4436 highlights.captures.set_byte_range(self.range.clone());
4437 self.initialize_diagnostic_endpoints();
4438 }
4439 }
4440
4441 fn initialize_diagnostic_endpoints(&mut self) {
4442 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4443 if let Some(buffer) = self.buffer_snapshot {
4444 let mut diagnostic_endpoints = Vec::new();
4445 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4446 diagnostic_endpoints.push(DiagnosticEndpoint {
4447 offset: entry.range.start,
4448 is_start: true,
4449 severity: entry.diagnostic.severity,
4450 is_unnecessary: entry.diagnostic.is_unnecessary,
4451 });
4452 diagnostic_endpoints.push(DiagnosticEndpoint {
4453 offset: entry.range.end,
4454 is_start: false,
4455 severity: entry.diagnostic.severity,
4456 is_unnecessary: entry.diagnostic.is_unnecessary,
4457 });
4458 }
4459 diagnostic_endpoints
4460 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4461 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4462 self.hint_depth = 0;
4463 self.error_depth = 0;
4464 self.warning_depth = 0;
4465 self.information_depth = 0;
4466 }
4467 }
4468 }
4469
4470 /// The current byte offset in the buffer.
4471 pub fn offset(&self) -> usize {
4472 self.range.start
4473 }
4474
4475 pub fn range(&self) -> Range<usize> {
4476 self.range.clone()
4477 }
4478
4479 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4480 let depth = match endpoint.severity {
4481 DiagnosticSeverity::ERROR => &mut self.error_depth,
4482 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4483 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4484 DiagnosticSeverity::HINT => &mut self.hint_depth,
4485 _ => return,
4486 };
4487 if endpoint.is_start {
4488 *depth += 1;
4489 } else {
4490 *depth -= 1;
4491 }
4492
4493 if endpoint.is_unnecessary {
4494 if endpoint.is_start {
4495 self.unnecessary_depth += 1;
4496 } else {
4497 self.unnecessary_depth -= 1;
4498 }
4499 }
4500 }
4501
4502 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4503 if self.error_depth > 0 {
4504 Some(DiagnosticSeverity::ERROR)
4505 } else if self.warning_depth > 0 {
4506 Some(DiagnosticSeverity::WARNING)
4507 } else if self.information_depth > 0 {
4508 Some(DiagnosticSeverity::INFORMATION)
4509 } else if self.hint_depth > 0 {
4510 Some(DiagnosticSeverity::HINT)
4511 } else {
4512 None
4513 }
4514 }
4515
4516 fn current_code_is_unnecessary(&self) -> bool {
4517 self.unnecessary_depth > 0
4518 }
4519}
4520
4521impl<'a> Iterator for BufferChunks<'a> {
4522 type Item = Chunk<'a>;
4523
4524 fn next(&mut self) -> Option<Self::Item> {
4525 let mut next_capture_start = usize::MAX;
4526 let mut next_diagnostic_endpoint = usize::MAX;
4527
4528 if let Some(highlights) = self.highlights.as_mut() {
4529 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4530 if *parent_capture_end <= self.range.start {
4531 highlights.stack.pop();
4532 } else {
4533 break;
4534 }
4535 }
4536
4537 if highlights.next_capture.is_none() {
4538 highlights.next_capture = highlights.captures.next();
4539 }
4540
4541 while let Some(capture) = highlights.next_capture.as_ref() {
4542 if self.range.start < capture.node.start_byte() {
4543 next_capture_start = capture.node.start_byte();
4544 break;
4545 } else {
4546 let highlight_id =
4547 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4548 highlights
4549 .stack
4550 .push((capture.node.end_byte(), highlight_id));
4551 highlights.next_capture = highlights.captures.next();
4552 }
4553 }
4554 }
4555
4556 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4557 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4558 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4559 if endpoint.offset <= self.range.start {
4560 self.update_diagnostic_depths(endpoint);
4561 diagnostic_endpoints.next();
4562 } else {
4563 next_diagnostic_endpoint = endpoint.offset;
4564 break;
4565 }
4566 }
4567 }
4568 self.diagnostic_endpoints = diagnostic_endpoints;
4569
4570 if let Some(chunk) = self.chunks.peek() {
4571 let chunk_start = self.range.start;
4572 let mut chunk_end = (self.chunks.offset() + chunk.len())
4573 .min(next_capture_start)
4574 .min(next_diagnostic_endpoint);
4575 let mut highlight_id = None;
4576 if let Some(highlights) = self.highlights.as_ref() {
4577 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4578 chunk_end = chunk_end.min(*parent_capture_end);
4579 highlight_id = Some(*parent_highlight_id);
4580 }
4581 }
4582
4583 let slice =
4584 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4585 self.range.start = chunk_end;
4586 if self.range.start == self.chunks.offset() + chunk.len() {
4587 self.chunks.next().unwrap();
4588 }
4589
4590 Some(Chunk {
4591 text: slice,
4592 syntax_highlight_id: highlight_id,
4593 diagnostic_severity: self.current_diagnostic_severity(),
4594 is_unnecessary: self.current_code_is_unnecessary(),
4595 ..Chunk::default()
4596 })
4597 } else {
4598 None
4599 }
4600 }
4601}
4602
4603impl operation_queue::Operation for Operation {
4604 fn lamport_timestamp(&self) -> clock::Lamport {
4605 match self {
4606 Operation::Buffer(_) => {
4607 unreachable!("buffer operations should never be deferred at this layer")
4608 }
4609 Operation::UpdateDiagnostics {
4610 lamport_timestamp, ..
4611 }
4612 | Operation::UpdateSelections {
4613 lamport_timestamp, ..
4614 }
4615 | Operation::UpdateCompletionTriggers {
4616 lamport_timestamp, ..
4617 } => *lamport_timestamp,
4618 }
4619 }
4620}
4621
4622impl Default for Diagnostic {
4623 fn default() -> Self {
4624 Self {
4625 source: Default::default(),
4626 code: None,
4627 code_description: None,
4628 severity: DiagnosticSeverity::ERROR,
4629 message: Default::default(),
4630 markdown: None,
4631 group_id: 0,
4632 is_primary: false,
4633 is_disk_based: false,
4634 is_unnecessary: false,
4635 data: None,
4636 }
4637 }
4638}
4639
4640impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
4642 pub fn spaces(len: u32) -> Self {
4643 Self {
4644 len,
4645 kind: IndentKind::Space,
4646 }
4647 }
4648
4649 /// Returns an [`IndentSize`] representing a tab.
4650 pub fn tab() -> Self {
4651 Self {
4652 len: 1,
4653 kind: IndentKind::Tab,
4654 }
4655 }
4656
4657 /// An iterator over the characters represented by this [`IndentSize`].
4658 pub fn chars(&self) -> impl Iterator<Item = char> {
4659 iter::repeat(self.char()).take(self.len as usize)
4660 }
4661
4662 /// The character representation of this [`IndentSize`].
4663 pub fn char(&self) -> char {
4664 match self.kind {
4665 IndentKind::Space => ' ',
4666 IndentKind::Tab => '\t',
4667 }
4668 }
4669
4670 /// Consumes the current [`IndentSize`] and returns a new one that has
4671 /// been shrunk or enlarged by the given size along the given direction.
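    ///
    /// A small worked sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// // Growing a 4-space indent by another 4 spaces yields an 8-space indent.
    /// let grown = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(grown.len, 8);
    ///
    /// // Shrinking it back by 4 spaces returns to 4.
    /// let shrunk = grown.with_delta(Ordering::Less, IndentSize::spaces(4));
    /// assert_eq!(shrunk.len, 4);
    /// ```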
4672 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4673 match direction {
4674 Ordering::Less => {
4675 if self.kind == size.kind && self.len >= size.len {
4676 self.len -= size.len;
4677 }
4678 }
4679 Ordering::Equal => {}
4680 Ordering::Greater => {
4681 if self.len == 0 {
4682 self = size;
4683 } else if self.kind == size.kind {
4684 self.len += size.len;
4685 }
4686 }
4687 }
4688 self
4689 }
4690
4691 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4692 match self.kind {
4693 IndentKind::Space => self.len as usize,
4694 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4695 }
4696 }
4697}
4698
4699#[cfg(any(test, feature = "test-support"))]
4700pub struct TestFile {
4701 pub path: Arc<Path>,
4702 pub root_name: String,
4703 pub local_root: Option<PathBuf>,
4704}
4705
4706#[cfg(any(test, feature = "test-support"))]
4707impl File for TestFile {
4708 fn path(&self) -> &Arc<Path> {
4709 &self.path
4710 }
4711
4712 fn full_path(&self, _: &gpui::App) -> PathBuf {
4713 PathBuf::from(&self.root_name).join(self.path.as_ref())
4714 }
4715
4716 fn as_local(&self) -> Option<&dyn LocalFile> {
4717 if self.local_root.is_some() {
4718 Some(self)
4719 } else {
4720 None
4721 }
4722 }
4723
4724 fn disk_state(&self) -> DiskState {
4725 unimplemented!()
4726 }
4727
4728 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4729 self.path().file_name().unwrap_or(self.root_name.as_ref())
4730 }
4731
4732 fn worktree_id(&self, _: &App) -> WorktreeId {
4733 WorktreeId::from_usize(0)
4734 }
4735
4736 fn to_proto(&self, _: &App) -> rpc::proto::File {
4737 unimplemented!()
4738 }
4739
4740 fn is_private(&self) -> bool {
4741 false
4742 }
4743}
4744
4745#[cfg(any(test, feature = "test-support"))]
4746impl LocalFile for TestFile {
4747 fn abs_path(&self, _cx: &App) -> PathBuf {
4748 PathBuf::from(self.local_root.as_ref().unwrap())
4749 .join(&self.root_name)
4750 .join(self.path.as_ref())
4751 }
4752
4753 fn load(&self, _cx: &App) -> Task<Result<String>> {
4754 unimplemented!()
4755 }
4756
4757 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4758 unimplemented!()
4759 }
4760}
4761
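/// Groups an ascending sequence of `u32` values into contiguous ranges, splitting any
/// run that would exceed `max_len`.
///
/// A small worked sketch (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// // Runs are capped at `max_len` values, so 1..3 is split from 3..4.
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```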
4762pub(crate) fn contiguous_ranges(
4763 values: impl Iterator<Item = u32>,
4764 max_len: usize,
4765) -> impl Iterator<Item = Range<u32>> {
4766 let mut values = values;
4767 let mut current_range: Option<Range<u32>> = None;
4768 std::iter::from_fn(move || {
4769 loop {
4770 if let Some(value) = values.next() {
4771 if let Some(range) = &mut current_range {
4772 if value == range.end && range.len() < max_len {
4773 range.end += 1;
4774 continue;
4775 }
4776 }
4777
4778 let prev_range = current_range.clone();
4779 current_range = Some(value..(value + 1));
4780 if prev_range.is_some() {
4781 return prev_range;
4782 }
4783 } else {
4784 return current_range.take();
4785 }
4786 }
4787 })
4788}
4789
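/// Classifies characters as word, whitespace, or punctuation, optionally honoring the
/// word-character overrides of a [`LanguageScope`].
///
/// A small worked sketch (not compiled as a doctest), using no language scope:
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert!(classifier.is_word('a'));
/// assert!(classifier.is_word('_'));
/// assert!(classifier.is_whitespace(' '));
/// assert!(classifier.is_punctuation('-'));
///
/// // With `ignore_punctuation`, punctuation characters are treated as word characters.
/// let relaxed = classifier.ignore_punctuation(true);
/// assert!(relaxed.is_word('-'));
/// ```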
4790#[derive(Default, Debug)]
4791pub struct CharClassifier {
4792 scope: Option<LanguageScope>,
4793 for_completion: bool,
4794 ignore_punctuation: bool,
4795}
4796
4797impl CharClassifier {
4798 pub fn new(scope: Option<LanguageScope>) -> Self {
4799 Self {
4800 scope,
4801 for_completion: false,
4802 ignore_punctuation: false,
4803 }
4804 }
4805
4806 pub fn for_completion(self, for_completion: bool) -> Self {
4807 Self {
4808 for_completion,
4809 ..self
4810 }
4811 }
4812
4813 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4814 Self {
4815 ignore_punctuation,
4816 ..self
4817 }
4818 }
4819
4820 pub fn is_whitespace(&self, c: char) -> bool {
4821 self.kind(c) == CharKind::Whitespace
4822 }
4823
4824 pub fn is_word(&self, c: char) -> bool {
4825 self.kind(c) == CharKind::Word
4826 }
4827
4828 pub fn is_punctuation(&self, c: char) -> bool {
4829 self.kind(c) == CharKind::Punctuation
4830 }
4831
4832 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4833 if c.is_alphanumeric() || c == '_' {
4834 return CharKind::Word;
4835 }
4836
4837 if let Some(scope) = &self.scope {
4838 let characters = if self.for_completion {
4839 scope.completion_query_characters()
4840 } else {
4841 scope.word_characters()
4842 };
4843 if let Some(characters) = characters {
4844 if characters.contains(&c) {
4845 return CharKind::Word;
4846 }
4847 }
4848 }
4849
4850 if c.is_whitespace() {
4851 return CharKind::Whitespace;
4852 }
4853
4854 if ignore_punctuation {
4855 CharKind::Word
4856 } else {
4857 CharKind::Punctuation
4858 }
4859 }
4860
4861 pub fn kind(&self, c: char) -> CharKind {
4862 self.kind_with(c, self.ignore_punctuation)
4863 }
4864}
4865
4866/// Find all of the ranges of whitespace that occur at the ends of lines
4867/// in the given rope.
4868///
4869/// This could also be done with a regex search, but this implementation
4870/// avoids copying text.
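///
/// A minimal sketch (not compiled as a doctest), assuming `Rope` can be built from a `&str`:
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// let ranges = trailing_whitespace_ranges(&rope);
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(ranges.len(), 2);
/// ```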
4871pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4872 let mut ranges = Vec::new();
4873
4874 let mut offset = 0;
4875 let mut prev_chunk_trailing_whitespace_range = 0..0;
4876 for chunk in rope.chunks() {
4877 let mut prev_line_trailing_whitespace_range = 0..0;
4878 for (i, line) in chunk.split('\n').enumerate() {
4879 let line_end_offset = offset + line.len();
4880 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4881 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4882
4883 if i == 0 && trimmed_line_len == 0 {
4884 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4885 }
4886 if !prev_line_trailing_whitespace_range.is_empty() {
4887 ranges.push(prev_line_trailing_whitespace_range);
4888 }
4889
4890 offset = line_end_offset + 1;
4891 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4892 }
4893
4894 offset -= 1;
4895 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4896 }
4897
4898 if !prev_chunk_trailing_whitespace_range.is_empty() {
4899 ranges.push(prev_chunk_trailing_whitespace_range);
4900 }
4901
4902 ranges
4903}